[ 565.101606] env[62522]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62522) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 565.101949] env[62522]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62522) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 565.102163] env[62522]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62522) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 565.102380] env[62522]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 565.193701] env[62522]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62522) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 565.204373] env[62522]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.011s {{(pid=62522) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 565.803790] env[62522]: INFO nova.virt.driver [None req-9b5e0bda-c211-4e0b-87fa-1cb2d9eaa0ba None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 565.874374] env[62522]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 565.874531] env[62522]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 565.874629] env[62522]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62522) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 568.968061] env[62522]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-c369d3f4-ae35-4848-8ad6-0c1e5393aed1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 568.984155] env[62522]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62522) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 568.984288] env[62522]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-471908c5-0088-4ebf-b236-1606f31da3d7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.016246] env[62522]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 976d0.
[ 569.016406] env[62522]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.142s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 569.016950] env[62522]: INFO nova.virt.vmwareapi.driver [None req-9b5e0bda-c211-4e0b-87fa-1cb2d9eaa0ba None None] VMware vCenter version: 7.0.3
[ 569.020477] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace0e8fa-3607-409b-a38a-145e084af060 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.038011] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a10e5c-653f-4f53-a1ce-2256d45b595f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.044212] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a174ee6f-010f-4cce-b697-7eefe78db78f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.051076] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-775ba664-cc22-418a-8ae7-dbf75c807900 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.064067] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-849c5c5c-8b36-4d27-95d0-37c601b192b0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.070007] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7291baa6-5bef-4c8b-973e-df8ebe946dbb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.100269] env[62522]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-fdb97890-1822-493b-a814-cfe91df6a4d0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.105007] env[62522]: DEBUG nova.virt.vmwareapi.driver [None req-9b5e0bda-c211-4e0b-87fa-1cb2d9eaa0ba None None] Extension org.openstack.compute already exists. {{(pid=62522) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 569.107727] env[62522]: INFO nova.compute.provider_config [None req-9b5e0bda-c211-4e0b-87fa-1cb2d9eaa0ba None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 569.611461] env[62522]: DEBUG nova.context [None req-9b5e0bda-c211-4e0b-87fa-1cb2d9eaa0ba None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),bb3f01c5-745c-434b-84ce-7ba7abb4034d(cell1) {{(pid=62522) load_cells /opt/stack/nova/nova/context.py:464}}
[ 569.613630] env[62522]: DEBUG oslo_concurrency.lockutils [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 569.613863] env[62522]: DEBUG oslo_concurrency.lockutils [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 569.614652] env[62522]: DEBUG oslo_concurrency.lockutils [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 569.615091] env[62522]: DEBUG oslo_concurrency.lockutils [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] Acquiring lock "bb3f01c5-745c-434b-84ce-7ba7abb4034d" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 569.615309] env[62522]: DEBUG oslo_concurrency.lockutils [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] Lock "bb3f01c5-745c-434b-84ce-7ba7abb4034d" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 569.616316] env[62522]: DEBUG oslo_concurrency.lockutils [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] Lock "bb3f01c5-745c-434b-84ce-7ba7abb4034d" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 569.636785] env[62522]: INFO dbcounter [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] Registered counter for database nova_cell0
[ 569.644776] env[62522]: INFO dbcounter [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] Registered counter for database nova_cell1
[ 569.647974] env[62522]: DEBUG oslo_db.sqlalchemy.engines [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62522) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:395}}
[ 569.648352] env[62522]: DEBUG oslo_db.sqlalchemy.engines [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62522) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:395}}
[ 569.653052] env[62522]: ERROR nova.db.main.api [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 569.653052] env[62522]: result = function(*args, **kwargs)
[ 569.653052] env[62522]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 569.653052] env[62522]: return func(*args, **kwargs)
[ 569.653052] env[62522]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 569.653052] env[62522]: result = fn(*args, **kwargs)
[ 569.653052] env[62522]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 569.653052] env[62522]: return f(*args, **kwargs)
[ 569.653052] env[62522]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 569.653052] env[62522]: return db.service_get_minimum_version(context, binaries)
[ 569.653052] env[62522]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 569.653052] env[62522]: _check_db_access()
[ 569.653052] env[62522]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 569.653052] env[62522]: stacktrace = ''.join(traceback.format_stack())
[ 569.653052] env[62522]:
[ 569.654016] env[62522]: ERROR nova.db.main.api [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 569.654016] env[62522]: result = function(*args, **kwargs)
[ 569.654016] env[62522]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 569.654016] env[62522]: return func(*args, **kwargs)
[ 569.654016] env[62522]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 569.654016] env[62522]: result = fn(*args, **kwargs)
[ 569.654016] env[62522]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 569.654016] env[62522]: return f(*args, **kwargs)
[ 569.654016] env[62522]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 569.654016] env[62522]: return db.service_get_minimum_version(context, binaries)
[ 569.654016] env[62522]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 569.654016] env[62522]: _check_db_access()
[ 569.654016] env[62522]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 569.654016] env[62522]: stacktrace = ''.join(traceback.format_stack())
[ 569.654016] env[62522]:
[ 569.654407] env[62522]: WARNING nova.objects.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 569.654538] env[62522]: WARNING nova.objects.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] Failed to get minimum service version for cell bb3f01c5-745c-434b-84ce-7ba7abb4034d
[ 569.654957] env[62522]: DEBUG oslo_concurrency.lockutils [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] Acquiring lock "singleton_lock" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 569.655177] env[62522]: DEBUG oslo_concurrency.lockutils [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] Acquired lock "singleton_lock" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[
569.655408] env[62522]: DEBUG oslo_concurrency.lockutils [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] Releasing lock "singleton_lock" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.655724] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] Full set of CONF: {{(pid=62522) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 569.655869] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ******************************************************************************** {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 569.655997] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] Configuration options gathered from: {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 569.656148] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 569.656344] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 569.656472] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ================================================================================ {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 569.656677] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] allow_resize_to_same_host = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.656846] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] arq_binding_timeout = 300 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.656977] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] backdoor_port = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.657136] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] backdoor_socket = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.657315] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] block_device_allocate_retries = 60 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.657481] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] block_device_allocate_retries_interval = 3 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.657648] env[62522]: DEBUG 
oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cert = self.pem {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.657815] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.657981] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] compute_monitors = [] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.658184] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] config_dir = [] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.658365] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] config_drive_format = iso9660 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.658499] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.658661] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] config_source = [] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.658824] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] console_host = devstack {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.658984] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] control_exchange = nova {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.659155] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cpu_allocation_ratio = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.659315] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] daemon = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.659494] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] debug = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.659652] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] default_access_ip_network_name = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.659820] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] default_availability_zone = nova {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.659971] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] default_ephemeral_format = 
None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.660145] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] default_green_pool_size = 1000 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.660385] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.660550] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] default_schedule_zone = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.660709] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] disk_allocation_ratio = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.660866] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] enable_new_services = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.661053] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] enabled_apis = ['osapi_compute'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.661224] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] enabled_ssl_apis = [] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.661384] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] flat_injected = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.661540] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] force_config_drive = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.661696] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] force_raw_images = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.661861] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] graceful_shutdown_timeout = 5 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.662029] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] heal_instance_info_cache_interval = 60 {{(pid=62522) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.662265] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] host = cpu-1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.662445] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.662610] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] initial_disk_allocation_ratio = 1.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.662770] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] initial_ram_allocation_ratio = 1.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.662990] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.663177] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] instance_build_timeout = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.663339] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] instance_delete_interval = 300 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.663506] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] instance_format = [instance: %(uuid)s] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.663671] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] instance_name_template = instance-%08x {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.663831] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] instance_usage_audit = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.663997] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] instance_usage_audit_period = month {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.664182] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.664349] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] instances_path = /opt/stack/data/nova/instances {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.664512] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] internal_service_availability_zone = internal {{(pid=62522) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.664666] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] key = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.664825] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] live_migration_retry_count = 30 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.664990] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] log_color = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.665188] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] log_config_append = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.665371] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.665533] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] log_dir = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.665690] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] log_file = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.665818] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] log_options = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.665979] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] log_rotate_interval = 1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.666161] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] log_rotate_interval_type = days {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.666332] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] log_rotation_type = none {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.666462] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.666587] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.666753] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.666918] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.667063] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.667264] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] long_rpc_timeout = 1800 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.667440] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] max_concurrent_builds = 10 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.667591] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] max_concurrent_live_migrations = 1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.667748] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] max_concurrent_snapshots = 5 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.667906] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] max_local_block_devices = 3 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.668078] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] max_logfile_count = 30 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.668266] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] max_logfile_size_mb = 200 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.668431] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] maximum_instance_delete_attempts = 5 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.668599] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] metadata_listen = 0.0.0.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.668764] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] metadata_listen_port = 8775 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.668930] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] metadata_workers = 2 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.669101] env[62522]: DEBUG oslo_service.service 
[None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] migrate_max_retries = -1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.669271] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] mkisofs_cmd = genisoimage {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.669478] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] my_block_storage_ip = 10.180.1.21 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.669610] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] my_ip = 10.180.1.21 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.669813] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.669977] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] network_allocate_retries = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.670168] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.670338] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] osapi_compute_listen = 0.0.0.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.670504] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] osapi_compute_listen_port = 8774 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.670667] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] osapi_compute_unique_server_name_scope = {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.670834] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] osapi_compute_workers = 2 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.670995] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] password_length = 12 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.671169] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] periodic_enable = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.671329] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] periodic_fuzzy_delay = 60 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.671494] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] pointer_model = usbtablet 
{{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.671656] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] preallocate_images = none {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.671813] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] publish_errors = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.671941] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] pybasedir = /opt/stack/nova {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.672106] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ram_allocation_ratio = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.672269] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] rate_limit_burst = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.672434] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] rate_limit_except_level = CRITICAL {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.672590] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] rate_limit_interval = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.672748] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] reboot_timeout = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.672903] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] reclaim_instance_interval = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.673068] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] record = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.673242] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] reimage_timeout_per_gb = 60 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.673399] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] report_interval = 120 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.673560] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] rescue_timeout = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.673718] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] reserved_host_cpus = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.673872] env[62522]: DEBUG oslo_service.service [None 
req-470a4c06-4087-4d10-a766-68b29b60f982 None None] reserved_host_disk_mb = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.674041] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] reserved_host_memory_mb = 512 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.674206] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] reserved_huge_pages = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.674368] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] resize_confirm_window = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.674527] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] resize_fs_using_block_device = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.674684] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] resume_guests_state_on_host_boot = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.674851] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.675020] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] rpc_response_timeout = 60 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.675209] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] run_external_periodic_tasks = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.675391] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] running_deleted_instance_action = reap {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.675553] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] running_deleted_instance_poll_interval = 1800 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.675713] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] running_deleted_instance_timeout = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.675870] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] scheduler_instance_sync_interval = 120 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.676049] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] service_down_time = 720 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.676248] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] 
servicegroup_driver = db {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.676376] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] shell_completion = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.676539] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] shelved_offload_time = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.676698] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] shelved_poll_interval = 3600 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.676866] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] shutdown_timeout = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.677035] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] source_is_ipv6 = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.677238] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ssl_only = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.677506] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.677687] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] sync_power_state_interval = 600 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.677852] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] sync_power_state_pool_size = 1000 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.678033] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] syslog_log_facility = LOG_USER {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.678216] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] tempdir = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.678388] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] timeout_nbd = 10 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.678555] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] transport_url = **** {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.678714] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] update_resources_interval = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.678871] env[62522]: DEBUG 
oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] use_cow_images = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.679037] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] use_journal = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.679205] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] use_json = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.679364] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] use_rootwrap_daemon = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.679519] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] use_stderr = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.679674] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] use_syslog = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.679826] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vcpu_pin_set = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.680026] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vif_plugging_is_fatal = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.680166] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vif_plugging_timeout = 300 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.680328] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] virt_mkfs = [] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.680485] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] volume_usage_poll_interval = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.680640] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] watch_log_file = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.680803] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] web = /usr/share/spice-html5 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 569.680983] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.681162] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62522) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.681325] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.681490] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_concurrency.disable_process_locking = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.682058] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.682259] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.682438] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.682617] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.682791] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.682962] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.683164] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api.auth_strategy = keystone {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.683347] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api.compute_link_prefix = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.683515] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.683691] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api.dhcp_domain = novalocal {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.683861] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api.enable_instance_password = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.684038] 
env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api.glance_link_prefix = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.684213] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.684387] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.684551] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api.instance_list_per_project_cells = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.684711] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api.list_records_by_skipping_down_cells = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.684873] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api.local_metadata_per_cell = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.685050] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api.max_limit = 1000 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.685256] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api.metadata_cache_expiration = 15 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.685443] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api.neutron_default_tenant_id = default {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.685616] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api.response_validation = warn {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.685787] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api.use_neutron_default_nets = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.685957] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.686136] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.686310] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62522) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.686484] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.686654] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api.vendordata_dynamic_targets = [] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.686817] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api.vendordata_jsonfile_path = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.686998] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.687243] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.backend = dogpile.cache.memcached {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.687420] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.backend_argument = **** {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.687586] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.backend_expiration_time = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.687760] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.config_prefix = cache.oslo {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.687931] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.dead_timeout = 60.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.688124] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.debug_cache_backend = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.688308] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.enable_retry_client = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.688476] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.enable_socket_keepalive = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.688650] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.enabled = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.688817] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.enforce_fips_mode = False {{(pid=62522) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.688986] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.expiration_time = 600 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.689168] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.hashclient_retry_attempts = 2 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.689337] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.hashclient_retry_delay = 1.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.689501] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.memcache_dead_retry = 300 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.689661] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.memcache_password = **** {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.689825] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.689988] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.690166] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.memcache_pool_maxsize = 10 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.690330] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.690491] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.memcache_sasl_enabled = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.690670] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.690838] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.memcache_socket_timeout = 1.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.691011] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.memcache_username = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.691181] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.proxies = [] {{(pid=62522) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.691348] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.redis_db = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.691510] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.redis_password = **** {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.691681] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.redis_sentinel_service_name = mymaster {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.691856] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.692039] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.redis_server = localhost:6379 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.692213] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.redis_socket_timeout = 1.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.692377] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.redis_username = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.692543] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.retry_attempts = 2 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.692709] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.retry_delay = 0.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.692872] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.socket_keepalive_count = 1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.693045] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.socket_keepalive_idle = 1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.693213] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.socket_keepalive_interval = 1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.693373] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.tls_allowed_ciphers = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.693532] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.tls_cafile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.693689] 
env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.tls_certfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.693852] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.tls_enabled = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.694020] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cache.tls_keyfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.694197] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cinder.auth_section = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.694372] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cinder.auth_type = password {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.694536] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cinder.cafile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.694713] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cinder.catalog_info = volumev3::publicURL {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.694873] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cinder.certfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.695047] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cinder.collect_timing = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.695244] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cinder.cross_az_attach = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.695420] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cinder.debug = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.695583] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cinder.endpoint_template = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.695750] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cinder.http_retries = 3 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.695914] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cinder.insecure = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.696086] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cinder.keyfile = None {{(pid=62522) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.696262] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cinder.os_region_name = RegionOne {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.696430] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cinder.split_loggers = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.696589] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cinder.timeout = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.696760] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.696922] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] compute.cpu_dedicated_set = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.697094] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] compute.cpu_shared_set = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.697292] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] compute.image_type_exclude_list = [] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.697465] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.697631] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] compute.max_concurrent_disk_ops = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.697795] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] compute.max_disk_devices_to_attach = -1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.697958] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.698157] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.698336] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] compute.resource_provider_association_refresh = 300 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.698499] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] 
compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.698663] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] compute.shutdown_retry_interval = 10 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.698843] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.699034] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] conductor.workers = 2 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.699221] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] console.allowed_origins = [] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.699386] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] console.ssl_ciphers = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.699560] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] console.ssl_minimum_version = default {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.699729] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] consoleauth.enforce_session_timeout = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.699897] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] consoleauth.token_ttl = 600 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.700082] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cyborg.cafile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.700245] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cyborg.certfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.700411] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cyborg.collect_timing = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.700571] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cyborg.connect_retries = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.700730] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cyborg.connect_retry_delay = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.700888] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cyborg.endpoint_override = None 
{{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.701064] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cyborg.insecure = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.701226] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cyborg.keyfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.701389] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cyborg.max_version = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.701550] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cyborg.min_version = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.701709] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cyborg.region_name = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.701866] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cyborg.retriable_status_codes = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.702036] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cyborg.service_name = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.702213] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cyborg.service_type = accelerator {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.702378] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cyborg.split_loggers = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.702538] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cyborg.status_code_retries = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.702695] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cyborg.status_code_retry_delay = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.702852] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cyborg.timeout = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.703044] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.703211] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] cyborg.version = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
569.703382] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] database.asyncio_connection = **** {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.703540] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] database.asyncio_slave_connection = **** {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.703708] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] database.backend = sqlalchemy {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.703876] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] database.connection = **** {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.704051] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] database.connection_debug = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.704225] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] database.connection_parameters = {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.704390] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] database.connection_recycle_time = 3600 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.704551] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] database.connection_trace = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.704714] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] database.db_inc_retry_interval = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.704876] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] database.db_max_retries = 20 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.705050] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] database.db_max_retry_interval = 10 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.705241] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] database.db_retry_interval = 1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.705415] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] database.max_overflow = 50 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.705578] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] database.max_pool_size = 5 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.705739] env[62522]: DEBUG oslo_service.service [None 
req-470a4c06-4087-4d10-a766-68b29b60f982 None None] database.max_retries = 10 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.705907] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.706078] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] database.mysql_wsrep_sync_wait = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.706242] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] database.pool_timeout = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.706406] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] database.retry_interval = 10 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.706564] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] database.slave_connection = **** {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.706721] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] database.sqlite_synchronous = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.706881] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] database.use_db_reconnect = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.707059] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api_database.asyncio_connection = **** {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.707267] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api_database.asyncio_slave_connection = **** {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.707449] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api_database.backend = sqlalchemy {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.707620] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api_database.connection = **** {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.707788] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api_database.connection_debug = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.707955] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api_database.connection_parameters = {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.708135] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None 
None] api_database.connection_recycle_time = 3600 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.708323] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api_database.connection_trace = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.708493] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api_database.db_inc_retry_interval = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.708656] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api_database.db_max_retries = 20 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.708820] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api_database.db_max_retry_interval = 10 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.708984] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api_database.db_retry_interval = 1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.709163] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api_database.max_overflow = 50 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.709328] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api_database.max_pool_size = 5 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.709489] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api_database.max_retries = 10 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.709655] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.709813] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.709972] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api_database.pool_timeout = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.710149] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api_database.retry_interval = 10 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.710310] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] api_database.slave_connection = **** {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.710473] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] 
api_database.sqlite_synchronous = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.710647] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] devices.enabled_mdev_types = [] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.710824] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.710997] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ephemeral_storage_encryption.default_format = luks {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.711179] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ephemeral_storage_encryption.enabled = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.711349] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.711521] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.api_servers = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.711685] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.cafile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.711847] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.certfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.712023] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.collect_timing = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.712182] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.connect_retries = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.712341] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.connect_retry_delay = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.712501] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.debug = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.712664] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.default_trusted_certificate_ids = [] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.712824] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.enable_certificate_validation 
= False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.712984] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.enable_rbd_download = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.713158] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.endpoint_override = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.713323] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.insecure = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.713482] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.keyfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.713638] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.max_version = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.713792] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.min_version = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.713951] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.num_retries = 3 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.714133] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.rbd_ceph_conf = {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.714301] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.rbd_connect_timeout = 5 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.714469] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.rbd_pool = {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.714635] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.rbd_user = {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.714802] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.region_name = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.714962] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.retriable_status_codes = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.715154] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.service_name = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.715347] env[62522]: DEBUG 
oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.service_type = image {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.715515] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.split_loggers = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.715677] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.status_code_retries = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.715838] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.status_code_retry_delay = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.715998] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.timeout = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.716195] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.716361] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.verify_glance_signatures = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.716522] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] glance.version = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.716689] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] guestfs.debug = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.716854] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] manila.auth_section = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.717026] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] manila.auth_type = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.717217] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] manila.cafile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.717388] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] manila.certfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.717552] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] manila.collect_timing = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.717713] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] manila.connect_retries = None 
{{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.717870] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] manila.connect_retry_delay = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.718039] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] manila.endpoint_override = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.718217] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] manila.insecure = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.718422] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] manila.keyfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.718587] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] manila.max_version = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.718746] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] manila.min_version = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.718905] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] manila.region_name = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.719076] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] manila.retriable_status_codes = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.719240] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] manila.service_name = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.719411] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] manila.service_type = shared-file-system {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.719575] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] manila.share_apply_policy_timeout = 10 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.719736] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] manila.split_loggers = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.719893] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] manila.status_code_retries = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.720062] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] manila.status_code_retry_delay = None {{(pid=62522) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.720225] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] manila.timeout = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.720405] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.720564] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] manila.version = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.720729] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] mks.enabled = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.721096] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.721291] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] image_cache.manager_interval = 2400 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.721466] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] image_cache.precache_concurrency = 1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.721639] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] image_cache.remove_unused_base_images = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.721809] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.721978] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.722169] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] image_cache.subdirectory_name = _base {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.722348] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.api_max_retries = 60 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.722511] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.api_retry_interval = 2 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.722670] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.auth_section = None {{(pid=62522) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.722828] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.auth_type = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.722983] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.cafile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.723153] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.certfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.723315] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.collect_timing = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.723474] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.conductor_group = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.723631] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.connect_retries = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.723784] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.connect_retry_delay = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.723937] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.endpoint_override = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.724106] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.insecure = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.724266] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.keyfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.724421] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.max_version = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.724574] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.min_version = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.724736] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.peer_list = [] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.724892] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.region_name = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.725062] env[62522]: DEBUG oslo_service.service [None 
req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.retriable_status_codes = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.725257] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.serial_console_state_timeout = 10 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.725423] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.service_name = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.725592] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.service_type = baremetal {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.725750] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.shard = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.725911] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.split_loggers = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.726081] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.status_code_retries = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.726242] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.status_code_retry_delay = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.726401] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.timeout = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.726578] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.726735] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ironic.version = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.726915] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.727105] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] key_manager.fixed_key = **** {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.727313] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.727478] env[62522]: DEBUG oslo_service.service [None 
req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican.barbican_api_version = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.727637] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican.barbican_endpoint = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.727805] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican.barbican_endpoint_type = public {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.727962] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican.barbican_region_name = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.728134] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican.cafile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.728314] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican.certfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.728482] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican.collect_timing = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.728641] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican.insecure = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.728795] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican.keyfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.728957] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican.number_of_retries = 60 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.729134] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican.retry_delay = 1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.729299] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican.send_service_user_token = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.729459] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican.split_loggers = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.729616] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican.timeout = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.729773] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican.verify_ssl = True {{(pid=62522) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.729929] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican.verify_ssl_path = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.730105] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican_service_user.auth_section = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.730270] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican_service_user.auth_type = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.730426] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican_service_user.cafile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.730582] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican_service_user.certfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.730740] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican_service_user.collect_timing = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.730898] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican_service_user.insecure = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.731066] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican_service_user.keyfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.731232] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican_service_user.split_loggers = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.731390] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] barbican_service_user.timeout = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.731555] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vault.approle_role_id = **** {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.731712] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vault.approle_secret_id = **** {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.731881] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vault.kv_mountpoint = secret {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.732047] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vault.kv_path = None {{(pid=62522) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.732214] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vault.kv_version = 2 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.732372] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vault.namespace = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.732527] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vault.root_token_id = **** {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.732683] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vault.ssl_ca_crt_file = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.732848] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vault.timeout = 60.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.733014] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vault.use_ssl = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.733190] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.733356] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] keystone.cafile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.733518] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] keystone.certfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.733683] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] keystone.collect_timing = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.733843] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] keystone.connect_retries = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.734011] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] keystone.connect_retry_delay = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.734173] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] keystone.endpoint_override = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.734337] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] keystone.insecure = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.734492] env[62522]: DEBUG oslo_service.service [None 
req-470a4c06-4087-4d10-a766-68b29b60f982 None None] keystone.keyfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.734649] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] keystone.max_version = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.734804] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] keystone.min_version = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.734959] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] keystone.region_name = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.735155] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] keystone.retriable_status_codes = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.735337] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] keystone.service_name = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.735511] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] keystone.service_type = identity {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.735674] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] keystone.split_loggers = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.735835] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] keystone.status_code_retries = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.735992] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] keystone.status_code_retry_delay = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.736165] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] keystone.timeout = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.736349] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.736508] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] keystone.version = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.736700] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.ceph_mount_options = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.736999] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.ceph_mount_point_base 
= /opt/stack/data/n-cpu-1/mnt {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.737226] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.connection_uri = {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.737400] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.cpu_mode = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.737569] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.cpu_model_extra_flags = [] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.737739] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.cpu_models = [] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.737910] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.cpu_power_governor_high = performance {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.738095] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.cpu_power_governor_low = powersave {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.738283] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.cpu_power_management = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.738461] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.738638] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.device_detach_attempts = 8 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.738806] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.device_detach_timeout = 20 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.738974] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.disk_cachemodes = [] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.739149] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.disk_prefix = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.739317] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.enabled_perf_events = [] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.739479] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.file_backed_memory = 0 {{(pid=62522) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.739643] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.gid_maps = [] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.739800] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.hw_disk_discard = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.739956] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.hw_machine_type = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.740138] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.images_rbd_ceph_conf = {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.740303] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.740465] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.740632] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.images_rbd_glance_store_name = {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.740798] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.images_rbd_pool = rbd {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.740965] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.images_type = default {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.741136] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.images_volume_group = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.741300] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.inject_key = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.741461] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.inject_partition = -2 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.741619] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.inject_password = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.741777] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.iscsi_iface = None {{(pid=62522) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.741934] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.iser_use_multipath = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.742107] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.live_migration_bandwidth = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.742273] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.742436] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.live_migration_downtime = 500 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.742597] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.742759] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.742918] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.live_migration_inbound_addr = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.743089] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.743251] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.live_migration_permit_post_copy = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.743411] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.live_migration_scheme = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.743579] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.live_migration_timeout_action = abort {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.743738] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.live_migration_tunnelled = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.743894] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.live_migration_uri = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.744064] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] 
libvirt.live_migration_with_native_tls = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.744225] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.max_queues = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.744389] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.744608] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.744770] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.nfs_mount_options = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.745078] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.745290] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.745468] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.num_iser_scan_tries = 5 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.745631] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.num_memory_encrypted_guests = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.745795] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.745958] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.num_pcie_ports = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.746148] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.num_volume_scan_tries = 5 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.746320] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.pmem_namespaces = [] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.746480] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.quobyte_client_cfg = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.746790] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.746971] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.rbd_connect_timeout = 5 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.747178] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.747383] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.747555] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.rbd_secret_uuid = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.747717] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.rbd_user = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.748035] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.748077] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.remote_filesystem_transport = ssh {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.748225] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.rescue_image_id = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.748385] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.rescue_kernel_id = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.748542] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.rescue_ramdisk_id = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.748710] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.748869] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.rx_queue_size = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.749046] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.smbfs_mount_options = {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.749351] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.749528] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.snapshot_compression = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.749690] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.snapshot_image_format = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.749921] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.750137] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.sparse_logical_volumes = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.750258] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.swtpm_enabled = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.750423] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.swtpm_group = tss {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.750592] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.swtpm_user = tss {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.750761] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.sysinfo_serial = unique {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.750920] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.tb_cache_size = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.751090] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.tx_queue_size = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.751259] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.uid_maps = [] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.751424] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.use_virtio_for_bridges = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.751593] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.virt_type = kvm {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.751761] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.volume_clear = zero 
{{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.751926] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.volume_clear_size = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.752106] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.volume_use_multipath = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.752269] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.vzstorage_cache_path = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.752436] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.752605] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.vzstorage_mount_group = qemu {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.752768] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.vzstorage_mount_opts = [] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.752934] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.753236] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.753418] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.vzstorage_mount_user = stack {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.753586] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.753757] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.auth_section = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.753932] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.auth_type = password {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.754108] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.cafile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.754274] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.certfile = None 
{{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.754438] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.collect_timing = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.754601] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.connect_retries = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.754759] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.connect_retry_delay = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.754929] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.default_floating_pool = public {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.755108] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.endpoint_override = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.755301] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.extension_sync_interval = 600 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.755470] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.http_retries = 3 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.755634] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.insecure = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.755793] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.keyfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.755953] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.max_version = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.756147] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.756311] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.min_version = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.756482] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.ovs_bridge = br-int {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.756650] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.physnets = [] {{(pid=62522) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.756819] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.region_name = RegionOne {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.756979] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.retriable_status_codes = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.757188] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.service_metadata_proxy = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.757362] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.service_name = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.757533] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.service_type = network {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.757698] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.split_loggers = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.757857] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.status_code_retries = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.758026] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.status_code_retry_delay = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.758197] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.timeout = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.758374] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.758535] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] neutron.version = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.758705] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] notifications.bdms_in_notifications = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.758881] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] notifications.default_level = INFO {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.759056] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] notifications.include_share_mapping = False {{(pid=62522) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.759240] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] notifications.notification_format = unversioned {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.759404] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] notifications.notify_on_state_change = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.759580] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.759755] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] pci.alias = [] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.759926] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] pci.device_spec = [] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.760108] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] pci.report_in_placement = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.760288] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.auth_section = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.760461] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.auth_type = password {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.760628] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.760789] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.cafile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.760943] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.certfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.761118] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.collect_timing = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.761281] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.connect_retries = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.761437] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.connect_retry_delay = None {{(pid=62522) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.761595] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.default_domain_id = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.761751] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.default_domain_name = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.761908] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.domain_id = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.762075] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.domain_name = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.762235] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.endpoint_override = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.762396] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.insecure = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.762550] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.keyfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.762703] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.max_version = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.762856] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.min_version = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.763030] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.password = **** {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.763196] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.project_domain_id = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.763358] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.project_domain_name = Default {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.763521] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.project_id = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.763690] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.project_name = service {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.763854] 
env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.region_name = RegionOne {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.764026] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.retriable_status_codes = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.764192] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.service_name = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.764362] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.service_type = placement {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.764525] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.split_loggers = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.764684] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.status_code_retries = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.764843] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.status_code_retry_delay = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.765007] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.system_scope = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.765191] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.timeout = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.765361] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.trust_id = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.765520] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.user_domain_id = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.765686] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.user_domain_name = Default {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.765844] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.user_id = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.766025] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.username = nova {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.766213] env[62522]: DEBUG oslo_service.service [None 
req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.766376] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] placement.version = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.766555] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] quota.cores = 20 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.766721] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] quota.count_usage_from_placement = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.766891] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.767072] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] quota.injected_file_content_bytes = 10240 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.767265] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] quota.injected_file_path_length = 255 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.767436] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] quota.injected_files = 5 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.767602] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] quota.instances = 10 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.767767] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] quota.key_pairs = 100 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.767932] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] quota.metadata_items = 128 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.768114] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] quota.ram = 51200 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.768284] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] quota.recheck_quota = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.768452] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] quota.server_group_members = 10 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.768619] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] quota.server_groups = 10 {{(pid=62522) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.768821] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] quota.unified_limits_resource_list = ['servers'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.768994] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] quota.unified_limits_resource_strategy = require {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.769189] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.769355] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.769516] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] scheduler.image_metadata_prefilter = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.769676] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.769837] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] scheduler.max_attempts = 3 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.769996] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] scheduler.max_placement_results = 1000 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.770173] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.770336] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] scheduler.query_placement_for_image_type_support = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.770496] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.770666] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] scheduler.workers = 2 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.770842] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.771016] 
env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.771199] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.771372] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.771535] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.771696] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.771857] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.772052] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.772226] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.host_subset_size = 1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.772393] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.772550] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.772712] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.772874] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.isolated_hosts = [] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.773047] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.isolated_images = [] 
{{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.773236] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.773402] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.773564] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.773724] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.pci_in_placement = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.773883] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.774052] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.774217] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.774377] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.774537] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.774697] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.774854] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.track_instance_changes = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.775043] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.775244] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] metrics.required = True {{(pid=62522) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.775416] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] metrics.weight_multiplier = 1.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.775579] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.775742] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] metrics.weight_setting = [] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.776061] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.776242] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] serial_console.enabled = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.776423] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] serial_console.port_range = 10000:20000 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.776600] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.776764] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.776931] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] serial_console.serialproxy_port = 6083 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.777115] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] service_user.auth_section = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.777315] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] service_user.auth_type = password {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.777491] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] service_user.cafile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.777651] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] service_user.certfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.777814] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] service_user.collect_timing = False {{(pid=62522) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.777972] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] service_user.insecure = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.778143] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] service_user.keyfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.778319] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] service_user.send_service_user_token = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.778483] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] service_user.split_loggers = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.778641] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] service_user.timeout = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.778811] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] spice.agent_enabled = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.778974] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] spice.enabled = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.779288] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.779494] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.779666] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] spice.html5proxy_port = 6082 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.779828] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] spice.image_compression = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.779986] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] spice.jpeg_compression = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.780161] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] spice.playback_compression = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.780324] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] spice.require_secure = False {{(pid=62522) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.780494] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] spice.server_listen = 127.0.0.1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.780664] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.780823] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] spice.streaming_mode = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.781029] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] spice.zlib_compression = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.781161] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] upgrade_levels.baseapi = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.781335] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] upgrade_levels.compute = auto {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.781496] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] upgrade_levels.conductor = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.781654] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] upgrade_levels.scheduler = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.781817] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vendordata_dynamic_auth.auth_section = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.781977] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vendordata_dynamic_auth.auth_type = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.782150] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vendordata_dynamic_auth.cafile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.782314] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vendordata_dynamic_auth.certfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.782475] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.782636] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vendordata_dynamic_auth.insecure = False {{(pid=62522) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.782794] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vendordata_dynamic_auth.keyfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.782955] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.783127] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vendordata_dynamic_auth.timeout = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.783305] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vmware.api_retry_count = 10 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.783467] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vmware.ca_file = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.783637] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vmware.cache_prefix = devstack-image-cache {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.783802] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vmware.cluster_name = testcl1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.783967] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vmware.connection_pool_size = 10 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.784140] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vmware.console_delay_seconds = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.784312] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vmware.datastore_regex = ^datastore.* {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.784520] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.784694] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vmware.host_password = **** {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.784863] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vmware.host_port = 443 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.785043] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vmware.host_username = administrator@vsphere.local {{(pid=62522) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.785239] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vmware.insecure = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.785411] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vmware.integration_bridge = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.785577] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vmware.maximum_objects = 100 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.785734] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vmware.pbm_default_policy = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.785895] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vmware.pbm_enabled = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.786065] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vmware.pbm_wsdl_location = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.786239] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.786400] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vmware.serial_port_proxy_uri = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.786557] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vmware.serial_port_service_uri = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.786723] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vmware.task_poll_interval = 0.5 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.786893] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vmware.use_linked_clone = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.787072] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vmware.vnc_keymap = en-us {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.787264] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vmware.vnc_port = 5900 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.787434] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vmware.vnc_port_total = 10000 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.787622] 
env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vnc.auth_schemes = ['none'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.787795] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vnc.enabled = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.788101] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.788293] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.788468] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vnc.novncproxy_port = 6080 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.788664] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vnc.server_listen = 127.0.0.1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.788846] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.789015] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vnc.vencrypt_ca_certs = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.789186] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vnc.vencrypt_client_cert = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.789348] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vnc.vencrypt_client_key = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.789520] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.789683] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] workarounds.disable_deep_image_inspection = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.789843] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.790017] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
569.790179] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.790343] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] workarounds.disable_rootwrap = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.790504] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] workarounds.enable_numa_live_migration = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.790666] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.790824] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.790983] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.791158] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] workarounds.libvirt_disable_apic = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.791320] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.791483] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.791644] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.791816] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.791980] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.792153] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.792314] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None 
None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.792473] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.792631] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.792795] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.792977] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.793160] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] wsgi.client_socket_timeout = 900 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.793326] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] wsgi.default_pool_size = 1000 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.793492] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] wsgi.keep_alive = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.793658] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] wsgi.max_header_line = 16384 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.793819] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] wsgi.secure_proxy_ssl_header = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.793981] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] wsgi.ssl_ca_file = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.794160] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] wsgi.ssl_cert_file = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.794324] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] wsgi.ssl_key_file = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.794490] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] wsgi.tcp_keepidle = 600 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.794666] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] 
wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.794834] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] zvm.ca_file = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.794993] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] zvm.cloud_connector_url = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.795325] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.795508] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] zvm.reachable_timeout = 300 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.795684] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.795863] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.796054] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] profiler.connection_string = messaging:// {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.796229] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] profiler.enabled = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.796403] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] profiler.es_doc_type = notification {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.796568] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] profiler.es_scroll_size = 10000 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.796735] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] profiler.es_scroll_time = 2m {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.796896] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] profiler.filter_error_trace = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.797073] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] profiler.hmac_keys = **** {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.797276] env[62522]: DEBUG 
oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] profiler.sentinel_service_name = mymaster {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.797452] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] profiler.socket_timeout = 0.1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.797615] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] profiler.trace_requests = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.797774] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] profiler.trace_sqlalchemy = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.797973] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] profiler_jaeger.process_tags = {} {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.798135] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] profiler_jaeger.service_name_prefix = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.798315] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] profiler_otlp.service_name_prefix = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.798481] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] remote_debug.host = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.798641] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] remote_debug.port = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.798815] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.798976] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.799157] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.799323] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.799486] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.799647] 
env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.799810] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.799974] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.800152] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.800324] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.800483] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.800649] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.800816] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.800980] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.801163] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.801325] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.801491] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.801663] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.801824] env[62522]: DEBUG oslo_service.service [None 
req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.801986] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.802167] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.802336] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.802502] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.802663] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.802824] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.802986] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.803162] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.803366] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.803539] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.803705] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.ssl = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.803875] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.804053] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.ssl_cert_file = 
{{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.804221] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.804392] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.804560] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.ssl_version = {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.804722] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.804906] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.805097] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_notifications.retry = -1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.805308] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.805487] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_messaging_notifications.transport_url = **** {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.805660] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.auth_section = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.805822] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.auth_type = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.805980] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.cafile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.806154] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.certfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.806318] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.collect_timing = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.806475] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] 
oslo_limit.connect_retries = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.806632] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.connect_retry_delay = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.806788] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.endpoint_id = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.806957] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.endpoint_interface = publicURL {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.807152] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.endpoint_override = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.807327] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.endpoint_region_name = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.807490] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.endpoint_service_name = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.807647] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.endpoint_service_type = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.807808] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.insecure = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.807964] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.keyfile = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.808153] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.max_version = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.808345] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.min_version = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.808512] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.region_name = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.808672] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.retriable_status_codes = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.808830] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.service_name = None {{(pid=62522) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.808988] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.service_type = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.809167] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.split_loggers = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.809329] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.status_code_retries = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.809487] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.status_code_retry_delay = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.809642] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.timeout = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.809798] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.valid_interfaces = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.809953] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_limit.version = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.810129] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_reports.file_event_handler = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.810299] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.810458] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] oslo_reports.log_dir = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.810627] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.810785] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.810942] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.811127] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vif_plug_linux_bridge_privileged.logger_name = 
oslo_privsep.daemon {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.811302] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.811461] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.811630] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.811788] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vif_plug_ovs_privileged.group = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.811945] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.812124] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.812289] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.812447] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] vif_plug_ovs_privileged.user = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.812614] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] os_vif_linux_bridge.flat_interface = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.812791] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.812963] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.813146] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.813318] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.813488] 
env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.813654] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.813816] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.813991] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.814179] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] os_vif_ovs.isolate_vif = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.814352] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.814516] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.814686] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.814854] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] os_vif_ovs.ovsdb_interface = native {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.815025] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] os_vif_ovs.per_port_bridge = False {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.815224] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] privsep_osbrick.capabilities = [21] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.815395] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] privsep_osbrick.group = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.815554] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] privsep_osbrick.helper_command = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.815719] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
569.815882] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.816051] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] privsep_osbrick.user = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.816228] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.816390] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] nova_sys_admin.group = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.816613] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] nova_sys_admin.helper_command = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.816868] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.817071] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.817263] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] nova_sys_admin.user = None {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 569.817404] env[62522]: DEBUG oslo_service.service [None req-470a4c06-4087-4d10-a766-68b29b60f982 None None] ******************************************************************************** {{(pid=62522) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 569.817829] env[62522]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 570.321323] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] Getting list of instances from cluster (obj){ [ 570.321323] env[62522]: value = "domain-c8" [ 570.321323] env[62522]: _type = "ClusterComputeResource" [ 570.321323] env[62522]: } {{(pid=62522) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 570.322411] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af35da6b-695e-42e1-8b8c-ad4b8a235aaa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.331225] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] Got total of 0 instances {{(pid=62522) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 570.331795] env[62522]: WARNING nova.virt.vmwareapi.driver [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. 
It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 570.332280] env[62522]: INFO nova.virt.node [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] Generated node identity c7fa38b2-245d-4337-a012-22c1a01c0a72 [ 570.332511] env[62522]: INFO nova.virt.node [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] Wrote node identity c7fa38b2-245d-4337-a012-22c1a01c0a72 to /opt/stack/data/n-cpu-1/compute_id [ 570.835496] env[62522]: WARNING nova.compute.manager [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] Compute nodes ['c7fa38b2-245d-4337-a012-22c1a01c0a72'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 571.841067] env[62522]: INFO nova.compute.manager [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 572.849023] env[62522]: WARNING nova.compute.manager [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 572.849023] env[62522]: DEBUG oslo_concurrency.lockutils [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.849023] env[62522]: DEBUG oslo_concurrency.lockutils [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.849023] env[62522]: DEBUG oslo_concurrency.lockutils [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 572.849023] env[62522]: DEBUG nova.compute.resource_tracker [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62522) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 572.849023] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1184c42-6bd2-4075-977a-f3adce02b56d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.856686] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5556cbb1-0262-4a42-bfb5-a5eacc5e1c73 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.872197] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a5327d-a8c1-470b-ac09-467ff4c74526 {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.878407] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4dcb3a5-ef34-4f4a-a47c-7abf4ae3e13f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.908086] env[62522]: DEBUG nova.compute.resource_tracker [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181224MB free_disk=150GB free_vcpus=48 pci_devices=None {{(pid=62522) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 572.908388] env[62522]: DEBUG oslo_concurrency.lockutils [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.908691] env[62522]: DEBUG oslo_concurrency.lockutils [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.411526] env[62522]: WARNING nova.compute.resource_tracker [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] No compute node record for cpu-1:c7fa38b2-245d-4337-a012-22c1a01c0a72: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host c7fa38b2-245d-4337-a012-22c1a01c0a72 could not be found. [ 573.915763] env[62522]: INFO nova.compute.resource_tracker [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: c7fa38b2-245d-4337-a012-22c1a01c0a72 [ 575.424386] env[62522]: DEBUG nova.compute.resource_tracker [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 575.424797] env[62522]: DEBUG nova.compute.resource_tracker [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 575.591089] env[62522]: INFO nova.scheduler.client.report [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] [req-70958b2c-0772-48b0-952f-56870a5982e3] Created resource provider record via placement API for resource provider with UUID c7fa38b2-245d-4337-a012-22c1a01c0a72 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
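[annotation] The resource-provider record created above is then populated with the VCPU/MEMORY_MB/DISK_GB inventory shown in the next entries. As a rough illustration of that payload and of how Placement derives schedulable capacity from it (a sketch under assumptions, not Nova's resource-tracker code; build_inventory and effective_capacity are invented helper names, and the totals/ratios are taken from the views logged here):

# Minimal sketch: turn a hypervisor view like the one logged above into a
# Placement-style inventory dict, then compute the capacity Placement would
# consider schedulable: (total - reserved) * allocation_ratio per resource class.
def build_inventory(total_vcpus, total_mem_mb, total_disk_gb,
                    cpu_ratio=4.0, ram_ratio=1.0, disk_ratio=1.0,
                    reserved_mem_mb=512):
    return {
        'VCPU': {'total': total_vcpus, 'reserved': 0,
                 'min_unit': 1, 'max_unit': 16, 'step_size': 1,
                 'allocation_ratio': cpu_ratio},
        'MEMORY_MB': {'total': total_mem_mb, 'reserved': reserved_mem_mb,
                      'min_unit': 1, 'max_unit': 65530, 'step_size': 1,
                      'allocation_ratio': ram_ratio},
        'DISK_GB': {'total': total_disk_gb, 'reserved': 0,
                    'min_unit': 1, 'max_unit': 150, 'step_size': 1,
                    'allocation_ratio': disk_ratio},
    }

def effective_capacity(inv):
    # Placement treats usable capacity as (total - reserved) * allocation_ratio.
    return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
            for rc, v in inv.items()}

inv = build_inventory(48, 196590, 400)
print(effective_capacity(inv))
# {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}

[annotation] With the allocation ratios from the inventory entries below, the 48 physical vCPUs become 192 schedulable VCPU units, while memory and disk are only reduced by the reserved amounts; max_unit (16 VCPU, 150 GB) still caps what a single instance can claim.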
[ 575.608015] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ff6e51-7c3f-4b3d-b33c-ee6848e25728 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.615983] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac8c623-8f2a-480a-a8f4-caee2e0ee10e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.645864] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b615a76-9fea-4008-9dce-0c53b47d4515 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.652946] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc7a58b-702c-4e04-8869-bc3814ecad2e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.665713] env[62522]: DEBUG nova.compute.provider_tree [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 576.199229] env[62522]: DEBUG nova.scheduler.client.report [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] Updated inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 576.199461] env[62522]: DEBUG nova.compute.provider_tree [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] Updating resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 generation from 0 to 1 during operation: update_inventory {{(pid=62522) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 576.199603] env[62522]: DEBUG nova.compute.provider_tree [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 576.248781] env[62522]: DEBUG nova.compute.provider_tree [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] Updating 
resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 generation from 1 to 2 during operation: update_traits {{(pid=62522) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 576.753625] env[62522]: DEBUG nova.compute.resource_tracker [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62522) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 576.753884] env[62522]: DEBUG oslo_concurrency.lockutils [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.845s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 576.754086] env[62522]: DEBUG nova.service [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] Creating RPC server for service compute {{(pid=62522) start /opt/stack/nova/nova/service.py:186}} [ 576.767769] env[62522]: DEBUG nova.service [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] Join ServiceGroup membership for this service compute {{(pid=62522) start /opt/stack/nova/nova/service.py:203}} [ 576.767945] env[62522]: DEBUG nova.servicegroup.drivers.db [None req-61d5af25-2402-461b-aca9-45df7e858e16 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62522) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 606.769840] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 607.274352] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Getting list of instances from cluster (obj){ [ 607.274352] env[62522]: value = "domain-c8" [ 607.274352] env[62522]: _type = "ClusterComputeResource" [ 607.274352] env[62522]: } {{(pid=62522) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 607.275944] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5611c54c-4726-48de-838a-c2ceee41b7a1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.284611] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Got total of 0 instances {{(pid=62522) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 607.284920] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 607.285154] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Getting list of instances from cluster (obj){ [ 607.285154] env[62522]: value = "domain-c8" [ 607.285154] env[62522]: _type = "ClusterComputeResource" [ 607.285154] env[62522]: } {{(pid=62522) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 607.286102] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28623b97-1511-4ec7-89f8-398d0e5f5653 
{{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.293859] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Got total of 0 instances {{(pid=62522) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 616.747932] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Acquiring lock "c8779822-1694-463e-bd06-5f84d867d1bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.748275] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Lock "c8779822-1694-463e-bd06-5f84d867d1bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.252929] env[62522]: DEBUG nova.compute.manager [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 617.802514] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.802792] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.804496] env[62522]: INFO nova.compute.claims [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 618.875036] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-595f81f0-ac3d-467d-9f8c-625b1a82027d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.884736] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ffb3f8-b03e-4c08-af22-17be2ffaf8e2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.915373] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffda3844-cb82-4b6a-b19c-c6f2ed368ec2 {{(pid=62522) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.925300] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eeb38ad-4268-452f-b186-69378e0e2cbb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.936597] env[62522]: DEBUG nova.compute.provider_tree [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 619.230516] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Acquiring lock "a3830103-2dcb-40ac-8e62-b331fe4673ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.230516] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Lock "a3830103-2dcb-40ac-8e62-b331fe4673ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.440052] env[62522]: DEBUG nova.scheduler.client.report [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 619.732244] env[62522]: DEBUG nova.compute.manager [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Starting instance... 
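[annotation] The recurring "Acquiring lock ... / Lock ... acquired ... waited / released ... held" entries in this log come from oslo.concurrency's lock wrapper: a per-instance lock around the build, and a shared "compute_resources" lock around resource claims. A minimal sketch of that pattern, assuming oslo.concurrency is installed (illustrative function names, not Nova's actual ResourceTracker or ComputeManager code):

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid):
    # Runs with the in-process "compute_resources" lock held, so concurrent
    # claims serialize -- hence the waited/held timings reported in the log.
    print('claimed resources for %s' % instance_uuid)

def locked_build(instance_uuid):
    # Per-instance lock, analogous to the lock "<uuid>" entries taken around
    # _locked_do_build_and_run_instance in this log.
    with lockutils.lock(instance_uuid):
        instance_claim(instance_uuid)

locked_build('c8779822-1694-463e-bd06-5f84d867d1bd')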
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 619.947340] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.143s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 619.947698] env[62522]: DEBUG nova.compute.manager [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 620.261198] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.261480] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.265251] env[62522]: INFO nova.compute.claims [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 620.433118] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Acquiring lock "7828f9c8-fc02-4218-ba93-5362af807dad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.433347] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Lock "7828f9c8-fc02-4218-ba93-5362af807dad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.453383] env[62522]: DEBUG nova.compute.utils [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 620.454730] env[62522]: DEBUG nova.compute.manager [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 
tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Not allocating networking since 'none' was specified. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 620.834478] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquiring lock "74b6ae10-a595-4139-8eda-38fe1aa298cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.834805] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "74b6ae10-a595-4139-8eda-38fe1aa298cf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.938843] env[62522]: DEBUG nova.compute.manager [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 620.960256] env[62522]: DEBUG nova.compute.manager [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 620.983258] env[62522]: DEBUG oslo_concurrency.lockutils [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Acquiring lock "758ed671-347a-4949-9842-2f8cdcd261ae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.985397] env[62522]: DEBUG oslo_concurrency.lockutils [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Lock "758ed671-347a-4949-9842-2f8cdcd261ae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.338385] env[62522]: DEBUG nova.compute.manager [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 621.457874] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a47b45-b2b5-4c0d-ba24-89e6bbd06fbd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.467273] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.468485] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ccda518-c9eb-4373-9bf8-69a9024cf64a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.513565] env[62522]: DEBUG nova.compute.manager [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 621.519338] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0510fbcd-150e-420f-8fee-10aaf386ef94 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.530496] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25086448-0e51-4b5b-84ab-c6d66f5a60f3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.547496] env[62522]: DEBUG nova.compute.provider_tree [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 621.598866] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Acquiring lock "3824a70e-8498-410a-904d-c7cd0de0c358" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.599103] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Lock "3824a70e-8498-410a-904d-c7cd0de0c358" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.871863] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.978891] env[62522]: DEBUG nova.compute.manager [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 622.030161] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Acquiring lock "678b6b5f-b410-4c55-872e-4a74da6d7ebc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.030410] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Lock "678b6b5f-b410-4c55-872e-4a74da6d7ebc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.039387] env[62522]: DEBUG oslo_concurrency.lockutils [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.056163] env[62522]: DEBUG nova.scheduler.client.report [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 622.101316] env[62522]: DEBUG nova.compute.manager [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 622.484114] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Acquiring lock "433387e7-8de9-4cfb-9012-8652c65b5b97" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.484917] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Lock "433387e7-8de9-4cfb-9012-8652c65b5b97" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.532213] env[62522]: DEBUG nova.virt.hardware [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 622.532446] env[62522]: DEBUG nova.virt.hardware [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 622.532603] env[62522]: DEBUG nova.virt.hardware [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 622.532784] env[62522]: DEBUG nova.virt.hardware [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 622.532928] env[62522]: DEBUG nova.virt.hardware [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 622.536784] env[62522]: DEBUG nova.virt.hardware [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 
tempest-ServerDiagnosticsV248Test-1540031270-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 622.536784] env[62522]: DEBUG nova.virt.hardware [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 622.537159] env[62522]: DEBUG nova.virt.hardware [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 622.539375] env[62522]: DEBUG nova.virt.hardware [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 622.539582] env[62522]: DEBUG nova.virt.hardware [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 622.539763] env[62522]: DEBUG nova.virt.hardware [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 622.540284] env[62522]: DEBUG nova.compute.manager [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Starting instance... 
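[annotation] The virt.hardware entries above show the topology selection for the 1-vCPU m1.nano flavor: with no hw:cpu_* constraints on the flavor or image, the only (sockets, cores, threads) factorization of the vCPU count is 1x1x1, which is the single possible topology reported. A toy re-derivation of that enumeration (illustrative only; Nova's real logic lives in nova/virt/hardware.py):

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate (sockets, cores, threads) triples whose product equals the vCPU
    # count, bounded by the (effectively unlimited) maxima logged above.
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    yield (sockets, cores, threads)

print(list(possible_topologies(1)))   # [(1, 1, 1)] -- matches "Got 1 possible topologies"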
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 622.547025] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0759cff-5e23-44fc-a66c-abe5cc2aed42 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.561741] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-277c951e-3eb7-421b-816d-7ec8de97d88c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.566074] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.304s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 622.566529] env[62522]: DEBUG nova.compute.manager [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 622.573024] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.104s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.573466] env[62522]: INFO nova.compute.claims [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 622.591694] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb753915-6372-423c-bad4-de748c5688d7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.622688] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Instance VIF info [] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 622.636900] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Creating folder: OpenStack. Parent ref: group-v4. 
{{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 622.636900] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5eb5d423-b154-44c7-ba05-6dd287e0d1f0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.641615] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.649247] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Created folder: OpenStack in parent group-v4. [ 622.649859] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Creating folder: Project (ec3c6c267302415fa28ade55ba43dca3). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 622.649859] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-795ca484-16d0-49d3-a823-264e3a9ed536 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.660107] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Created folder: Project (ec3c6c267302415fa28ade55ba43dca3) in parent group-v489562. [ 622.661611] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Creating folder: Instances. Parent ref: group-v489563. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 622.661611] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-67cdee2a-b5a3-4e05-beeb-4ed89c079b35 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.675211] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Created folder: Instances in parent group-v489563. [ 622.675211] env[62522]: DEBUG oslo.service.loopingcall [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
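[annotation] The three Folder.CreateFolder calls above build the per-project VM folder hierarchy "OpenStack / Project (<project_id>) / Instances" under the datacenter's VM folder, one level at a time, reusing a level when it already exists. A schematic sketch of that idempotent walk (create_or_get_folder stands in for the vSphere call and is not a real oslo.vmware or Nova function; the toy stand-in below only demonstrates the traversal):

def ensure_folder_path(root_ref, project_id, create_or_get_folder):
    path = ['OpenStack', 'Project (%s)' % project_id, 'Instances']
    parent = root_ref
    for name in path:
        # Each hop returns the managed-object ref of the (possibly pre-existing)
        # child folder, which becomes the parent for the next level.
        parent = create_or_get_folder(parent, name)
    return parent

# Toy in-memory stand-in for the vCenter call, just to show the walk:
folders = {}
def fake_create_or_get(parent, name):
    return folders.setdefault((parent, name), '%s/%s' % (parent, name))

print(ensure_folder_path('group-v4', 'ec3c6c267302415fa28ade55ba43dca3', fake_create_or_get))
# group-v4/OpenStack/Project (ec3c6c267302415fa28ade55ba43dca3)/Instances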
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 622.675211] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 622.675334] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6e14b3ba-7238-4d4c-a798-24a855131e4b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.696819] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 622.696819] env[62522]: value = "task-2414929" [ 622.696819] env[62522]: _type = "Task" [ 622.696819] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.705630] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2414929, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.989019] env[62522]: DEBUG nova.compute.manager [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 623.073666] env[62522]: DEBUG nova.compute.utils [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 623.075019] env[62522]: DEBUG nova.compute.manager [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 623.075306] env[62522]: DEBUG nova.network.neutron [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 623.090426] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.093990] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Acquiring lock "a5657a70-5374-4d52-be9a-2d05f9556d16" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.094282] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Lock "a5657a70-5374-4d52-be9a-2d05f9556d16" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.134229] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Acquiring lock "9a098809-cc26-4210-b09e-b7825c406294" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.134229] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Lock "9a098809-cc26-4210-b09e-b7825c406294" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.200200] env[62522]: DEBUG nova.policy [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '32247919dd95421bae260e20ea166dc1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '500952665d634a76916f1998279db580', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 623.212496] env[62522]: DEBUG 
oslo_vmware.api [-] Task: {'id': task-2414929, 'name': CreateVM_Task, 'duration_secs': 0.321203} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.212756] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 623.214156] env[62522]: DEBUG oslo_vmware.service [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33208f28-3a57-4b02-815b-e8eff448e25b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.221114] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 623.221298] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.224490] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 623.224490] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7cb3cb6-00bc-4fd1-bbfd-3796e89616cf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.229569] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Waiting for the task: (returnval){ [ 623.229569] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52685283-abf7-4597-2d6f-8238ae717915" [ 623.229569] env[62522]: _type = "Task" [ 623.229569] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.240437] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52685283-abf7-4597-2d6f-8238ae717915, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.520073] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.586787] env[62522]: DEBUG nova.compute.manager [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 623.599080] env[62522]: DEBUG nova.compute.manager [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 623.636994] env[62522]: DEBUG nova.compute.manager [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 623.742126] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.742351] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 623.742678] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 623.742856] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.744049] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] 
Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 623.745129] env[62522]: DEBUG nova.network.neutron [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Successfully created port: 36173a2e-7d22-4ac6-aa18-ef15b74e3de1 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 623.748239] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c88ffd4-664c-4cb7-abbd-5adc60442b49 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.768639] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 623.769688] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 623.769688] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1857318c-5e71-4571-83be-a8fa9825fde7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.780288] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d657673e-52a6-40c3-973c-6b076e7a31c0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.787095] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Waiting for the task: (returnval){ [ 623.787095] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5202248c-d010-2eff-54fd-55ba54cdca36" [ 623.787095] env[62522]: _type = "Task" [ 623.787095] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.798589] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5202248c-d010-2eff-54fd-55ba54cdca36, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.815691] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2313af67-0b9b-4f25-a391-8c0da66da4d3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.824788] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47413243-9076-48bc-b1b2-d854eab843b6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.868904] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caea2c1b-6a2c-4ec1-b731-55d27691f0ca {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.878022] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682a588d-e8be-4be8-a13f-19eb7d61fc2f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.890421] env[62522]: DEBUG nova.compute.provider_tree [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 624.122018] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.156928] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.302248] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Preparing fetch location {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 624.302406] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Creating directory with path [datastore2] vmware_temp/cc959747-83bc-4b0a-be0d-5b355e6015b5/2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 624.303020] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa1f4f2b-59d8-4632-8e8c-3856cc461939 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.323693] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Created directory with path [datastore2] vmware_temp/cc959747-83bc-4b0a-be0d-5b355e6015b5/2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 624.324690] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Fetch image to [datastore2] vmware_temp/cc959747-83bc-4b0a-be0d-5b355e6015b5/2ee4561b-ba48-4f45-82f6-eac89be98290/tmp-sparse.vmdk {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 624.324690] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Downloading image file data 2ee4561b-ba48-4f45-82f6-eac89be98290 to [datastore2] vmware_temp/cc959747-83bc-4b0a-be0d-5b355e6015b5/2ee4561b-ba48-4f45-82f6-eac89be98290/tmp-sparse.vmdk on the data store datastore2 {{(pid=62522) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 624.325409] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1351d5c1-92b4-4667-92d2-f5c26da97d07 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.333461] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1982248c-2047-4da6-9d29-d6b8b50664e5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.345174] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3fdad1d-955a-4c52-a44d-80145f58bbda {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.383626] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-965bdfbf-6345-4c07-b1c9-7610fc8b0677 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.390943] env[62522]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-05294c5f-0ae9-440b-9b44-1f4abcc7f84a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.393627] env[62522]: DEBUG nova.scheduler.client.report [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 624.421308] env[62522]: DEBUG 
nova.virt.vmwareapi.images [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Downloading image file data 2ee4561b-ba48-4f45-82f6-eac89be98290 to the data store datastore2 {{(pid=62522) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 624.446818] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Acquiring lock "84ad5317-344d-44c1-9318-fa1574321296" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.447189] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Lock "84ad5317-344d-44c1-9318-fa1574321296" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.598435] env[62522]: DEBUG nova.compute.manager [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 624.631671] env[62522]: DEBUG nova.virt.hardware [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 624.631671] env[62522]: DEBUG nova.virt.hardware [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 624.631671] env[62522]: DEBUG nova.virt.hardware [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 624.631821] env[62522]: DEBUG nova.virt.hardware [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f 
tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 624.631821] env[62522]: DEBUG nova.virt.hardware [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 624.631821] env[62522]: DEBUG nova.virt.hardware [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 624.631821] env[62522]: DEBUG nova.virt.hardware [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 624.631940] env[62522]: DEBUG nova.virt.hardware [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 624.632668] env[62522]: DEBUG nova.virt.hardware [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 624.632668] env[62522]: DEBUG nova.virt.hardware [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 624.632668] env[62522]: DEBUG nova.virt.hardware [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 624.633617] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc9c6b2-da21-419d-b5fb-fd3091d2797a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.638076] env[62522]: DEBUG oslo_vmware.rw_handles [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cc959747-83bc-4b0a-be0d-5b355e6015b5/2ee4561b-ba48-4f45-82f6-eac89be98290/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62522) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 624.643242] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb01912-f0c1-4825-8fc7-74bebcde5bf8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.899273] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.327s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.899872] env[62522]: DEBUG nova.compute.manager [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 624.904478] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.033s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.906047] env[62522]: INFO nova.compute.claims [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 625.255563] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 625.256299] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 625.256299] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Starting heal instance info cache {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 625.256299] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Rebuilding the list of instances to heal {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 625.368199] env[62522]: DEBUG oslo_vmware.rw_handles [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Completed reading data from the image iterator. 
{{(pid=62522) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 625.368421] env[62522]: DEBUG oslo_vmware.rw_handles [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cc959747-83bc-4b0a-be0d-5b355e6015b5/2ee4561b-ba48-4f45-82f6-eac89be98290/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62522) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 625.412172] env[62522]: DEBUG nova.compute.utils [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 625.413996] env[62522]: DEBUG nova.compute.manager [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 625.413996] env[62522]: DEBUG nova.network.neutron [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 625.502899] env[62522]: DEBUG nova.policy [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '201a9232b8a94db9a15012d6e2e23cb8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b6ea04e641514ce28bb1366da528ac2b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 625.521920] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Downloaded image file data 2ee4561b-ba48-4f45-82f6-eac89be98290 to vmware_temp/cc959747-83bc-4b0a-be0d-5b355e6015b5/2ee4561b-ba48-4f45-82f6-eac89be98290/tmp-sparse.vmdk on the data store datastore2 {{(pid=62522) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 625.524540] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Caching image {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 625.524847] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 
tempest-ServerDiagnosticsV248Test-1540031270-project-member] Copying Virtual Disk [datastore2] vmware_temp/cc959747-83bc-4b0a-be0d-5b355e6015b5/2ee4561b-ba48-4f45-82f6-eac89be98290/tmp-sparse.vmdk to [datastore2] vmware_temp/cc959747-83bc-4b0a-be0d-5b355e6015b5/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 625.525218] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5bd0bb66-1319-4a06-9495-ed26b038cf9e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.534428] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Waiting for the task: (returnval){ [ 625.534428] env[62522]: value = "task-2414930" [ 625.534428] env[62522]: _type = "Task" [ 625.534428] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.544781] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': task-2414930, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.763026] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Skipping network cache update for instance because it is Building. {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 625.763026] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Skipping network cache update for instance because it is Building. {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 625.763026] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Skipping network cache update for instance because it is Building. {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 625.763026] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Skipping network cache update for instance because it is Building. {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 625.763026] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Didn't find any instances for network info cache update. 
{{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 625.763026] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 625.763475] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 625.763475] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 625.763475] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 625.763475] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 625.763475] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 625.763475] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62522) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 625.763673] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 625.919525] env[62522]: DEBUG nova.compute.manager [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 626.046669] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': task-2414930, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.137562] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-054dd585-80dd-4e99-800e-3cc80c0e4f31 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.148292] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4650e8f-affc-427c-93e6-f59cd39f4516 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.184831] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f58df44-0ab2-4da5-917c-9c3d8b57a0aa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.194652] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89412420-dc6b-431e-9227-6e3d10cea23b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.212542] env[62522]: DEBUG nova.compute.provider_tree [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 626.267578] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.550551] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': task-2414930, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.657359} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.550833] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Copied Virtual Disk [datastore2] vmware_temp/cc959747-83bc-4b0a-be0d-5b355e6015b5/2ee4561b-ba48-4f45-82f6-eac89be98290/tmp-sparse.vmdk to [datastore2] vmware_temp/cc959747-83bc-4b0a-be0d-5b355e6015b5/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 626.551117] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Deleting the datastore file [datastore2] vmware_temp/cc959747-83bc-4b0a-be0d-5b355e6015b5/2ee4561b-ba48-4f45-82f6-eac89be98290/tmp-sparse.vmdk {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 626.551269] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44639e60-7b54-41b6-a54f-0e3712cfe904 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.558177] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Waiting for the task: (returnval){ [ 626.558177] env[62522]: value = "task-2414931" [ 626.558177] env[62522]: _type = "Task" [ 626.558177] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.569021] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': task-2414931, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.627213] env[62522]: DEBUG nova.network.neutron [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Successfully created port: 7268d88d-66d8-4214-a46c-9f03f18f95cb {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 626.663779] env[62522]: DEBUG nova.network.neutron [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Successfully updated port: 36173a2e-7d22-4ac6-aa18-ef15b74e3de1 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 626.715712] env[62522]: DEBUG nova.scheduler.client.report [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 626.934561] env[62522]: DEBUG nova.compute.manager [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 626.973803] env[62522]: DEBUG nova.virt.hardware [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 626.974276] env[62522]: DEBUG nova.virt.hardware [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 626.974572] env[62522]: DEBUG nova.virt.hardware [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 626.974902] env[62522]: DEBUG nova.virt.hardware [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 626.975215] env[62522]: DEBUG nova.virt.hardware [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 626.975639] env[62522]: DEBUG nova.virt.hardware [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 626.975990] env[62522]: DEBUG nova.virt.hardware [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 626.976233] env[62522]: DEBUG nova.virt.hardware [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 626.976540] env[62522]: DEBUG nova.virt.hardware [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 626.976917] env[62522]: DEBUG nova.virt.hardware [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 626.977534] env[62522]: DEBUG nova.virt.hardware [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 626.978528] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2706401b-e4f8-41ec-9b8f-de45fe268c3d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.988204] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97062d09-1843-4870-857f-bb4e6c40e803 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.067938] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': task-2414931, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.052682} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.068307] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 627.068436] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Moving file from [datastore2] vmware_temp/cc959747-83bc-4b0a-be0d-5b355e6015b5/2ee4561b-ba48-4f45-82f6-eac89be98290 to [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290. {{(pid=62522) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 627.068655] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-c29ec49a-d63d-4609-ac3f-4462ddccd4dd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.076995] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Waiting for the task: (returnval){ [ 627.076995] env[62522]: value = "task-2414932" [ 627.076995] env[62522]: _type = "Task" [ 627.076995] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.084568] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': task-2414932, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.097392] env[62522]: DEBUG nova.compute.manager [req-142ed21e-ebb7-4280-97e8-a0847032813b req-78be3215-ce48-49f1-8dbf-fb20f4e3a4ca service nova] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Received event network-vif-plugged-36173a2e-7d22-4ac6-aa18-ef15b74e3de1 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 627.097392] env[62522]: DEBUG oslo_concurrency.lockutils [req-142ed21e-ebb7-4280-97e8-a0847032813b req-78be3215-ce48-49f1-8dbf-fb20f4e3a4ca service nova] Acquiring lock "a3830103-2dcb-40ac-8e62-b331fe4673ff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.097392] env[62522]: DEBUG oslo_concurrency.lockutils [req-142ed21e-ebb7-4280-97e8-a0847032813b req-78be3215-ce48-49f1-8dbf-fb20f4e3a4ca service nova] Lock "a3830103-2dcb-40ac-8e62-b331fe4673ff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 627.097392] env[62522]: DEBUG oslo_concurrency.lockutils [req-142ed21e-ebb7-4280-97e8-a0847032813b req-78be3215-ce48-49f1-8dbf-fb20f4e3a4ca service nova] Lock "a3830103-2dcb-40ac-8e62-b331fe4673ff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 627.097392] env[62522]: DEBUG nova.compute.manager [req-142ed21e-ebb7-4280-97e8-a0847032813b req-78be3215-ce48-49f1-8dbf-fb20f4e3a4ca service nova] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] No waiting events found dispatching network-vif-plugged-36173a2e-7d22-4ac6-aa18-ef15b74e3de1 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 627.098468] env[62522]: WARNING nova.compute.manager [req-142ed21e-ebb7-4280-97e8-a0847032813b req-78be3215-ce48-49f1-8dbf-fb20f4e3a4ca service nova] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Received unexpected event network-vif-plugged-36173a2e-7d22-4ac6-aa18-ef15b74e3de1 for instance with vm_state building and task_state spawning. 
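The WARNING above is expected during a build: Neutron delivered network-vif-plugged-36173a2e-7d22-4ac6-aa18-ef15b74e3de1 before the compute manager had registered a waiter for that event, so the pop under the "a3830103-...-events" lock found nothing to dispatch and the event was dropped with a warning rather than an error. A minimal sketch of that register/pop pattern, reduced to plain threading primitives and with all names hypothetical (this is not Nova's actual implementation), could look like:

import threading
from collections import defaultdict

class InstanceEvents:
    # Toy model of a per-instance external-event table; illustrative only.
    def __init__(self):
        self._lock = threading.Lock()      # stands in for the "<uuid>-events" lock seen in the log
        self._waiters = defaultdict(dict)  # instance uuid -> {event name: threading.Event}

    def prepare_for_event(self, instance_uuid, event_name):
        # A caller registers a waiter *before* triggering the action that emits the event.
        waiter = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = waiter
        return waiter

    def pop_event(self, instance_uuid, event_name):
        # Return the registered waiter, or None if nobody is waiting for this event.
        with self._lock:
            return self._waiters.get(instance_uuid, {}).pop(event_name, None)

def handle_external_event(events, instance_uuid, event_name):
    waiter = events.pop_event(instance_uuid, event_name)
    if waiter is None:
        # No waiter registered yet: log and drop, mirroring the WARNING in the log above.
        print("WARNING: unexpected event %s for instance %s" % (event_name, instance_uuid))
    else:
        waiter.set()  # unblocks whoever is blocked on waiter.wait()

# The event arriving with no registered waiter reproduces the warning seen above.
events = InstanceEvents()
handle_external_event(events, "a3830103-2dcb-40ac-8e62-b331fe4673ff",
                      "network-vif-plugged-36173a2e-7d22-4ac6-aa18-ef15b74e3de1")

In the real flow the waiter side is only armed once the driver starts plugging VIFs, which is why an early event during vm_state building / task_state spawning is merely noted and the build continues.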
[ 627.167318] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Acquiring lock "refresh_cache-a3830103-2dcb-40ac-8e62-b331fe4673ff" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 627.167518] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Acquired lock "refresh_cache-a3830103-2dcb-40ac-8e62-b331fe4673ff" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.167724] env[62522]: DEBUG nova.network.neutron [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 627.221722] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.317s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 627.222291] env[62522]: DEBUG nova.compute.manager [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 627.225611] env[62522]: DEBUG oslo_concurrency.lockutils [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.186s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 627.228224] env[62522]: INFO nova.compute.claims [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 627.588127] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': task-2414932, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.024764} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.588767] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] File moved {{(pid=62522) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 627.588978] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Cleaning up location [datastore2] vmware_temp/cc959747-83bc-4b0a-be0d-5b355e6015b5 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 627.589186] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Deleting the datastore file [datastore2] vmware_temp/cc959747-83bc-4b0a-be0d-5b355e6015b5 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 627.589497] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bdf80613-b752-4b2a-93d2-eb29d1c038bf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.595940] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Waiting for the task: (returnval){ [ 627.595940] env[62522]: value = "task-2414933" [ 627.595940] env[62522]: _type = "Task" [ 627.595940] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.604339] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': task-2414933, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.741168] env[62522]: DEBUG nova.compute.utils [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 627.743920] env[62522]: DEBUG nova.network.neutron [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 627.746313] env[62522]: DEBUG nova.compute.manager [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 627.746403] env[62522]: DEBUG nova.network.neutron [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 627.901188] env[62522]: DEBUG nova.policy [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e959d2cd75d94a38b0d6a7b93f74f819', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '13a5a5169d8345a7a88fef5ff0ecd26e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 628.106338] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': task-2414933, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.023509} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.106338] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 628.106944] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37f560f0-53ba-4cb2-8d1b-24f26485e3f7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.112669] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Waiting for the task: (returnval){ [ 628.112669] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e1122b-8630-0dcc-7bc2-43f7f069f071" [ 628.112669] env[62522]: _type = "Task" [ 628.112669] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.126256] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e1122b-8630-0dcc-7bc2-43f7f069f071, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.252836] env[62522]: DEBUG nova.compute.manager [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 628.263915] env[62522]: DEBUG nova.network.neutron [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Updating instance_info_cache with network_info: [{"id": "36173a2e-7d22-4ac6-aa18-ef15b74e3de1", "address": "fa:16:3e:1a:e7:5b", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36173a2e-7d", "ovs_interfaceid": "36173a2e-7d22-4ac6-aa18-ef15b74e3de1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.497420] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bbab436-364c-4753-920c-5dcea5b0edec {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.505478] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc525f2-a9bb-4bc8-aaf6-e7285d505292 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.541286] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b092d69e-997a-4146-975d-b316818947c8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.549711] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e9dbad5-115d-464f-88f0-16ecc900aaaf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.563352] env[62522]: DEBUG nova.compute.provider_tree [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 628.624336] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e1122b-8630-0dcc-7bc2-43f7f069f071, 'name': SearchDatastore_Task, 'duration_secs': 0.021419} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.624720] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 628.624837] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] c8779822-1694-463e-bd06-5f84d867d1bd/c8779822-1694-463e-bd06-5f84d867d1bd.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 628.625109] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fae54430-20d4-4b90-8d84-aee678fd632a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.631828] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Waiting for the task: (returnval){ [ 628.631828] env[62522]: value = "task-2414934" [ 628.631828] env[62522]: _type = "Task" [ 628.631828] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.640806] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': task-2414934, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.769966] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Releasing lock "refresh_cache-a3830103-2dcb-40ac-8e62-b331fe4673ff" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 628.770314] env[62522]: DEBUG nova.compute.manager [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Instance network_info: |[{"id": "36173a2e-7d22-4ac6-aa18-ef15b74e3de1", "address": "fa:16:3e:1a:e7:5b", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36173a2e-7d", "ovs_interfaceid": "36173a2e-7d22-4ac6-aa18-ef15b74e3de1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 628.770872] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:e7:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f6d1427-d86b-4371-9172-50e4bb0eb1cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '36173a2e-7d22-4ac6-aa18-ef15b74e3de1', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 628.789636] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Creating folder: Project (500952665d634a76916f1998279db580). Parent ref: group-v489562. 
{{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 628.789636] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3fee953c-0f33-434a-a168-dee6d41fb281 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.799032] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Created folder: Project (500952665d634a76916f1998279db580) in parent group-v489562. [ 628.799032] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Creating folder: Instances. Parent ref: group-v489566. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 628.799286] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f670a4b-3d24-4332-92c1-2c425b3f1766 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.809177] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Created folder: Instances in parent group-v489566. [ 628.809466] env[62522]: DEBUG oslo.service.loopingcall [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 628.809669] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 628.809870] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da812668-874c-4f5e-a49a-415839bab9ec {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.832845] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 628.832845] env[62522]: value = "task-2414937" [ 628.832845] env[62522]: _type = "Task" [ 628.832845] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.846495] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2414937, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.899117] env[62522]: DEBUG nova.network.neutron [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Successfully created port: d312748e-14f2-4467-bf2a-2f6479f774f0 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 629.068763] env[62522]: DEBUG nova.scheduler.client.report [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 629.150902] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': task-2414934, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.265353] env[62522]: DEBUG nova.compute.manager [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 629.303194] env[62522]: DEBUG nova.virt.hardware [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 629.303547] env[62522]: DEBUG nova.virt.hardware [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 629.303715] env[62522]: DEBUG nova.virt.hardware [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 629.303910] env[62522]: DEBUG nova.virt.hardware [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 629.304066] env[62522]: DEBUG nova.virt.hardware [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 629.304754] env[62522]: DEBUG nova.virt.hardware [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 629.304754] env[62522]: DEBUG nova.virt.hardware [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 629.304754] env[62522]: DEBUG nova.virt.hardware [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 629.305819] env[62522]: DEBUG nova.virt.hardware [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 629.305819] env[62522]: DEBUG nova.virt.hardware [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 629.305819] env[62522]: DEBUG nova.virt.hardware [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 629.306820] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e94b6952-f708-4467-90f9-7a476c0480b8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.315976] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f3fa34-4a3c-4dfe-a968-0256e39a3864 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.344031] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2414937, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.422770] env[62522]: DEBUG nova.network.neutron [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Successfully updated port: 7268d88d-66d8-4214-a46c-9f03f18f95cb {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 629.574578] env[62522]: DEBUG oslo_concurrency.lockutils [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.349s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 629.575097] env[62522]: DEBUG nova.compute.manager [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 629.578364] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.937s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.580286] env[62522]: INFO nova.compute.claims [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 629.646064] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Acquiring lock "bf2ccaeb-610a-437b-be94-d3caefbe15c5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.647382] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Lock "bf2ccaeb-610a-437b-be94-d3caefbe15c5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.654344] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': task-2414934, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.566817} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.655437] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] c8779822-1694-463e-bd06-5f84d867d1bd/c8779822-1694-463e-bd06-5f84d867d1bd.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 629.656047] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 629.656754] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f6b83ece-3de6-4ed1-b3b8-72e08186a647 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.667626] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Waiting for the task: (returnval){ [ 629.667626] env[62522]: value = "task-2414938" [ 629.667626] env[62522]: _type = "Task" [ 629.667626] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.678611] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': task-2414938, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.846274] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2414937, 'name': CreateVM_Task, 'duration_secs': 0.600339} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.846370] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 629.863173] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.863389] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.863641] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 629.863899] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a630e6b-a0d1-4d38-8f56-9a9c15c68955 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.869679] env[62522]: DEBUG oslo_vmware.api [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Waiting for the task: (returnval){ [ 629.869679] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5237bbe3-062e-3286-5de8-f6c6267ea47e" [ 629.869679] env[62522]: _type = "Task" [ 629.869679] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.876859] env[62522]: DEBUG oslo_vmware.api [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5237bbe3-062e-3286-5de8-f6c6267ea47e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.975580] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Acquiring lock "refresh_cache-7828f9c8-fc02-4218-ba93-5362af807dad" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.975580] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Acquired lock "refresh_cache-7828f9c8-fc02-4218-ba93-5362af807dad" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.975580] env[62522]: DEBUG nova.network.neutron [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 630.088027] env[62522]: DEBUG nova.compute.utils [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 630.094229] env[62522]: DEBUG nova.compute.manager [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 630.094397] env[62522]: DEBUG nova.network.neutron [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 630.178699] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': task-2414938, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.181116} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.179081] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 630.180139] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed654d6e-5dcc-4538-ae95-7e3d6b85a3b6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.212071] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] c8779822-1694-463e-bd06-5f84d867d1bd/c8779822-1694-463e-bd06-5f84d867d1bd.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 630.212434] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ccc31b4-add6-4517-b4c1-1e941c5c8d8a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.237525] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Waiting for the task: (returnval){ [ 630.237525] env[62522]: value = "task-2414939" [ 630.237525] env[62522]: _type = "Task" [ 630.237525] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.249552] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': task-2414939, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.273453] env[62522]: DEBUG nova.policy [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9d8e184bd9a04966afd6d6f192184e2d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c7a75d2c29c4446da2662a4c70328003', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 630.385804] env[62522]: DEBUG oslo_vmware.api [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5237bbe3-062e-3286-5de8-f6c6267ea47e, 'name': SearchDatastore_Task, 'duration_secs': 0.009966} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.386267] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.386896] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 630.386944] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 630.387703] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.388926] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 630.389297] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d6552d82-3c23-45e5-afd0-3e3eadfffe4a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.397745] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 630.401086] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 630.401086] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddcc89ba-236d-4aa9-b443-4da7142841ac {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.407756] env[62522]: DEBUG oslo_vmware.api [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Waiting for the task: (returnval){ [ 630.407756] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ff93a7-e0dd-c8c3-b3c9-6a7ae0e541a8" [ 630.407756] env[62522]: _type = "Task" [ 630.407756] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.421826] env[62522]: DEBUG oslo_vmware.api [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ff93a7-e0dd-c8c3-b3c9-6a7ae0e541a8, 'name': SearchDatastore_Task, 'duration_secs': 0.010233} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.425673] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4c0a3cc-9980-4984-ae03-f1d4a2aed08c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.431084] env[62522]: DEBUG oslo_vmware.api [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Waiting for the task: (returnval){ [ 630.431084] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522e83f5-db08-ed84-3228-c4bcd6e30330" [ 630.431084] env[62522]: _type = "Task" [ 630.431084] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.442332] env[62522]: DEBUG oslo_vmware.api [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522e83f5-db08-ed84-3228-c4bcd6e30330, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.518504] env[62522]: DEBUG nova.network.neutron [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 630.597390] env[62522]: DEBUG nova.compute.manager [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 630.749932] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': task-2414939, 'name': ReconfigVM_Task, 'duration_secs': 0.292804} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.750260] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Reconfigured VM instance instance-00000001 to attach disk [datastore2] c8779822-1694-463e-bd06-5f84d867d1bd/c8779822-1694-463e-bd06-5f84d867d1bd.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 630.750946] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e502117c-e21d-48dc-8e77-fc2c2fac4609 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.757318] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Waiting for the task: (returnval){ [ 630.757318] env[62522]: value = "task-2414940" [ 630.757318] env[62522]: _type = "Task" [ 630.757318] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.769022] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': task-2414940, 'name': Rename_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.782204] env[62522]: DEBUG nova.compute.manager [req-ccd6be8b-1596-4632-8b1f-dfc5317c57d4 req-55e908fe-db6d-4c95-98cc-6b9659f7926a service nova] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Received event network-changed-36173a2e-7d22-4ac6-aa18-ef15b74e3de1 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 630.782417] env[62522]: DEBUG nova.compute.manager [req-ccd6be8b-1596-4632-8b1f-dfc5317c57d4 req-55e908fe-db6d-4c95-98cc-6b9659f7926a service nova] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Refreshing instance network info cache due to event network-changed-36173a2e-7d22-4ac6-aa18-ef15b74e3de1. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 630.782696] env[62522]: DEBUG oslo_concurrency.lockutils [req-ccd6be8b-1596-4632-8b1f-dfc5317c57d4 req-55e908fe-db6d-4c95-98cc-6b9659f7926a service nova] Acquiring lock "refresh_cache-a3830103-2dcb-40ac-8e62-b331fe4673ff" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 630.782757] env[62522]: DEBUG oslo_concurrency.lockutils [req-ccd6be8b-1596-4632-8b1f-dfc5317c57d4 req-55e908fe-db6d-4c95-98cc-6b9659f7926a service nova] Acquired lock "refresh_cache-a3830103-2dcb-40ac-8e62-b331fe4673ff" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.782982] env[62522]: DEBUG nova.network.neutron [req-ccd6be8b-1596-4632-8b1f-dfc5317c57d4 req-55e908fe-db6d-4c95-98cc-6b9659f7926a service nova] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Refreshing network info cache for port 36173a2e-7d22-4ac6-aa18-ef15b74e3de1 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 630.849858] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d63e495-1b11-4179-afa4-0684e30242f4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.860972] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bebfc817-3f65-4a29-83ea-a85a090633bf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.897849] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070e24d5-6bb0-4cdc-9dd0-c40302cfb8f6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.901536] env[62522]: DEBUG nova.compute.manager [req-e6488b6d-df6e-4f4c-92c7-8538e8711778 req-2babdea6-92d4-45b6-ad90-caaebcbffc81 service nova] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Received event network-vif-plugged-7268d88d-66d8-4214-a46c-9f03f18f95cb {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 630.901954] env[62522]: DEBUG oslo_concurrency.lockutils [req-e6488b6d-df6e-4f4c-92c7-8538e8711778 req-2babdea6-92d4-45b6-ad90-caaebcbffc81 service nova] Acquiring lock "7828f9c8-fc02-4218-ba93-5362af807dad-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 630.902060] env[62522]: DEBUG oslo_concurrency.lockutils [req-e6488b6d-df6e-4f4c-92c7-8538e8711778 req-2babdea6-92d4-45b6-ad90-caaebcbffc81 service nova] Lock "7828f9c8-fc02-4218-ba93-5362af807dad-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.902154] env[62522]: DEBUG oslo_concurrency.lockutils [req-e6488b6d-df6e-4f4c-92c7-8538e8711778 req-2babdea6-92d4-45b6-ad90-caaebcbffc81 service nova] Lock "7828f9c8-fc02-4218-ba93-5362af807dad-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 630.902359] env[62522]: DEBUG nova.compute.manager 
[req-e6488b6d-df6e-4f4c-92c7-8538e8711778 req-2babdea6-92d4-45b6-ad90-caaebcbffc81 service nova] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] No waiting events found dispatching network-vif-plugged-7268d88d-66d8-4214-a46c-9f03f18f95cb {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 630.902488] env[62522]: WARNING nova.compute.manager [req-e6488b6d-df6e-4f4c-92c7-8538e8711778 req-2babdea6-92d4-45b6-ad90-caaebcbffc81 service nova] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Received unexpected event network-vif-plugged-7268d88d-66d8-4214-a46c-9f03f18f95cb for instance with vm_state building and task_state spawning. [ 630.908506] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b2566c-87df-431c-9642-64e064e1d507 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.924876] env[62522]: DEBUG nova.compute.provider_tree [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 630.943417] env[62522]: DEBUG oslo_vmware.api [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522e83f5-db08-ed84-3228-c4bcd6e30330, 'name': SearchDatastore_Task, 'duration_secs': 0.008769} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.943726] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.944035] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] a3830103-2dcb-40ac-8e62-b331fe4673ff/a3830103-2dcb-40ac-8e62-b331fe4673ff.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 630.946539] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-582f788e-4896-42a0-a41d-9e6536b1531b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.950962] env[62522]: DEBUG oslo_vmware.api [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Waiting for the task: (returnval){ [ 630.950962] env[62522]: value = "task-2414941" [ 630.950962] env[62522]: _type = "Task" [ 630.950962] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.959482] env[62522]: DEBUG oslo_vmware.api [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2414941, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.140824] env[62522]: DEBUG nova.network.neutron [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Updating instance_info_cache with network_info: [{"id": "7268d88d-66d8-4214-a46c-9f03f18f95cb", "address": "fa:16:3e:67:a8:1d", "network": {"id": "e8f2a03e-de08-467a-afd9-367a46aa3303", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1770276349-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ea04e641514ce28bb1366da528ac2b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "136c3499-9ca0-4f85-903d-1f194aa66ed9", "external-id": "nsx-vlan-transportzone-307", "segmentation_id": 307, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7268d88d-66", "ovs_interfaceid": "7268d88d-66d8-4214-a46c-9f03f18f95cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.270304] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': task-2414940, 'name': Rename_Task, 'duration_secs': 0.136759} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.270904] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 631.271490] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-672f413a-08dd-4b33-aeb4-f2d73ae6da4d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.280152] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Waiting for the task: (returnval){ [ 631.280152] env[62522]: value = "task-2414942" [ 631.280152] env[62522]: _type = "Task" [ 631.280152] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.294312] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': task-2414942, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.427349] env[62522]: DEBUG nova.scheduler.client.report [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 631.433316] env[62522]: DEBUG nova.network.neutron [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Successfully created port: fae6b6fe-00ac-409a-be5f-719500f98702 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 631.463746] env[62522]: DEBUG oslo_vmware.api [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2414941, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507126} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.464172] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] a3830103-2dcb-40ac-8e62-b331fe4673ff/a3830103-2dcb-40ac-8e62-b331fe4673ff.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 631.464544] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 631.464896] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-83010383-5f50-41fb-999d-3f9520642681 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.474127] env[62522]: DEBUG oslo_vmware.api [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Waiting for the task: (returnval){ [ 631.474127] env[62522]: value = "task-2414943" [ 631.474127] env[62522]: _type = "Task" [ 631.474127] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.482650] env[62522]: DEBUG oslo_vmware.api [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2414943, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.610635] env[62522]: DEBUG nova.compute.manager [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 631.647462] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Releasing lock "refresh_cache-7828f9c8-fc02-4218-ba93-5362af807dad" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 631.648104] env[62522]: DEBUG nova.compute.manager [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Instance network_info: |[{"id": "7268d88d-66d8-4214-a46c-9f03f18f95cb", "address": "fa:16:3e:67:a8:1d", "network": {"id": "e8f2a03e-de08-467a-afd9-367a46aa3303", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1770276349-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ea04e641514ce28bb1366da528ac2b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "136c3499-9ca0-4f85-903d-1f194aa66ed9", "external-id": "nsx-vlan-transportzone-307", "segmentation_id": 307, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7268d88d-66", "ovs_interfaceid": "7268d88d-66d8-4214-a46c-9f03f18f95cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 631.652411] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:a8:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '136c3499-9ca0-4f85-903d-1f194aa66ed9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7268d88d-66d8-4214-a46c-9f03f18f95cb', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 631.661032] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Creating folder: Project (b6ea04e641514ce28bb1366da528ac2b). Parent ref: group-v489562. 
{{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 631.663455] env[62522]: DEBUG nova.virt.hardware [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 631.664173] env[62522]: DEBUG nova.virt.hardware [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 631.664173] env[62522]: DEBUG nova.virt.hardware [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 631.664173] env[62522]: DEBUG nova.virt.hardware [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 631.664312] env[62522]: DEBUG nova.virt.hardware [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 631.664489] env[62522]: DEBUG nova.virt.hardware [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 631.664757] env[62522]: DEBUG nova.virt.hardware [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 631.664970] env[62522]: DEBUG nova.virt.hardware [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 631.665190] env[62522]: DEBUG nova.virt.hardware [None 
req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 631.665381] env[62522]: DEBUG nova.virt.hardware [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 631.665582] env[62522]: DEBUG nova.virt.hardware [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 631.665910] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5845020f-5981-498d-8d50-8d86808c76bb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.669039] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cecd43d-67cc-415c-bd25-e32267ab07ce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.680760] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Created folder: Project (b6ea04e641514ce28bb1366da528ac2b) in parent group-v489562. [ 631.681053] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Creating folder: Instances. Parent ref: group-v489569. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 631.683314] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-76bfe2cc-b3e7-4f7d-bbe4-e6490c38cdbb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.686164] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad38afd0-9c74-4091-b791-a02c438d082d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.701049] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Created folder: Instances in parent group-v489569. [ 631.701281] env[62522]: DEBUG oslo.service.loopingcall [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 631.701642] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 631.702030] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-01fc5acb-9a0a-4e9a-b0d8-00bff3f48c34 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.722162] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 631.722162] env[62522]: value = "task-2414946" [ 631.722162] env[62522]: _type = "Task" [ 631.722162] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.731803] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2414946, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.790669] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': task-2414942, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.932999] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.355s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 631.933647] env[62522]: DEBUG nova.compute.manager [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 631.937873] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.850s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.941273] env[62522]: INFO nova.compute.claims [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 631.987627] env[62522]: DEBUG oslo_vmware.api [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2414943, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062393} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.988619] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 631.990085] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dd513f8-d0d4-4293-b3cc-61c1535a688e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.018523] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Reconfiguring VM instance instance-00000002 to attach disk [datastore2] a3830103-2dcb-40ac-8e62-b331fe4673ff/a3830103-2dcb-40ac-8e62-b331fe4673ff.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 632.019025] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e98eff5e-b532-40d2-a2aa-5377fc62c66f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.038275] env[62522]: DEBUG nova.network.neutron [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Successfully updated port: d312748e-14f2-4467-bf2a-2f6479f774f0 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 632.051096] env[62522]: DEBUG oslo_vmware.api [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Waiting for the task: (returnval){ [ 632.051096] env[62522]: value = "task-2414947" [ 632.051096] env[62522]: _type = "Task" [ 632.051096] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.065034] env[62522]: DEBUG oslo_vmware.api [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2414947, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.148384] env[62522]: DEBUG nova.network.neutron [req-ccd6be8b-1596-4632-8b1f-dfc5317c57d4 req-55e908fe-db6d-4c95-98cc-6b9659f7926a service nova] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Updated VIF entry in instance network info cache for port 36173a2e-7d22-4ac6-aa18-ef15b74e3de1. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 632.148781] env[62522]: DEBUG nova.network.neutron [req-ccd6be8b-1596-4632-8b1f-dfc5317c57d4 req-55e908fe-db6d-4c95-98cc-6b9659f7926a service nova] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Updating instance_info_cache with network_info: [{"id": "36173a2e-7d22-4ac6-aa18-ef15b74e3de1", "address": "fa:16:3e:1a:e7:5b", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36173a2e-7d", "ovs_interfaceid": "36173a2e-7d22-4ac6-aa18-ef15b74e3de1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.236117] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2414946, 'name': CreateVM_Task, 'duration_secs': 0.356541} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.236310] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 632.238107] env[62522]: DEBUG oslo_vmware.service [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-278c052c-489d-487d-a90e-28338ab25aa0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.244993] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.245158] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.245508] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Acquired external semaphore "[datastore1] 
devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 632.245755] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1abfd24a-5939-4793-8571-8333cf7a441b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.250541] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Waiting for the task: (returnval){ [ 632.250541] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f2dd8f-596b-8f11-f7c7-a31071fdfd95" [ 632.250541] env[62522]: _type = "Task" [ 632.250541] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.258940] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f2dd8f-596b-8f11-f7c7-a31071fdfd95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.291111] env[62522]: DEBUG oslo_vmware.api [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': task-2414942, 'name': PowerOnVM_Task, 'duration_secs': 0.518447} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.291385] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 632.291677] env[62522]: INFO nova.compute.manager [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Took 10.31 seconds to spawn the instance on the hypervisor. 
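The surrounding entries all follow the same oslo.vmware pattern: a vim call returns a *_Task managed object (PowerOnVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task), and wait_for_task then polls it, producing the "_poll_task ... progress is N%" DEBUG lines and the final "completed successfully" entry. A minimal sketch of that pattern using the public oslo.vmware API is shown below; the vCenter address, credentials and the VM managed-object reference are placeholders for illustration, not values taken from this log.

    # Sketch of the task-and-poll pattern visible in these entries.
    # Host, credentials and the MoRef value are assumed placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5, port=443)

    # Build a reference to an existing VM (placeholder MoRef value).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # PowerOnVM_Task returns a task reference immediately ...
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # ... and wait_for_task polls it at task_poll_interval, which is what
    # emits the "progress is N%" DEBUG lines seen above, until the task
    # reports success or raises on error.
    task_info = session.wait_for_task(task)
    print(task_info.state)  # 'success' once the power-on completes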
[ 632.292268] env[62522]: DEBUG nova.compute.manager [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 632.292700] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3dcef3-279b-4c61-94d8-a561eb7d8d77 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.445959] env[62522]: DEBUG nova.compute.utils [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 632.450340] env[62522]: DEBUG nova.compute.manager [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 632.450546] env[62522]: DEBUG nova.network.neutron [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 632.540498] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquiring lock "refresh_cache-74b6ae10-a595-4139-8eda-38fe1aa298cf" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.540655] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquired lock "refresh_cache-74b6ae10-a595-4139-8eda-38fe1aa298cf" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.540801] env[62522]: DEBUG nova.network.neutron [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 632.567589] env[62522]: DEBUG oslo_vmware.api [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2414947, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.645970] env[62522]: DEBUG nova.policy [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c311a0f0ba854dc3b7f30d641c97229f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca08d150df0147b29b30fb57739c7a6e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 632.652660] env[62522]: DEBUG oslo_concurrency.lockutils [req-ccd6be8b-1596-4632-8b1f-dfc5317c57d4 req-55e908fe-db6d-4c95-98cc-6b9659f7926a service nova] Releasing lock "refresh_cache-a3830103-2dcb-40ac-8e62-b331fe4673ff" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 632.761275] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 632.761517] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 632.761741] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.761885] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.762147] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 632.762308] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f9c46078-e579-43ef-acec-df6d649b38dc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.784508] 
env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 632.784704] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 632.785493] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90aa67df-bd14-42a4-8581-54596a251e47 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.793052] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d05b9f5f-4c41-4d89-bb33-32300f8161d6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.798708] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Waiting for the task: (returnval){ [ 632.798708] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ffb167-67f8-9f64-240b-d945343a544f" [ 632.798708] env[62522]: _type = "Task" [ 632.798708] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.812941] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ffb167-67f8-9f64-240b-d945343a544f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.812941] env[62522]: INFO nova.compute.manager [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Took 15.06 seconds to build instance. [ 632.957627] env[62522]: DEBUG nova.compute.manager [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 633.065534] env[62522]: DEBUG oslo_vmware.api [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2414947, 'name': ReconfigVM_Task, 'duration_secs': 0.97891} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.065702] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Reconfigured VM instance instance-00000002 to attach disk [datastore2] a3830103-2dcb-40ac-8e62-b331fe4673ff/a3830103-2dcb-40ac-8e62-b331fe4673ff.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 633.066706] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0025c4a-355c-4fa4-bfb4-9622e6cf4e8d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.073457] env[62522]: DEBUG oslo_vmware.api [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Waiting for the task: (returnval){ [ 633.073457] env[62522]: value = "task-2414948" [ 633.073457] env[62522]: _type = "Task" [ 633.073457] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.087303] env[62522]: DEBUG oslo_vmware.api [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2414948, 'name': Rename_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.175546] env[62522]: DEBUG nova.network.neutron [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 633.194055] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673a42ca-60ab-479a-903c-e82b3aea6a68 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.201907] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c025a4d1-1630-4965-9976-e4b492376143 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.245400] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-411642c7-e221-4b57-b0cf-d4beb249f4ff {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.257846] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-404c77a1-e9b3-484e-9e42-012132c39186 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.274230] env[62522]: DEBUG nova.compute.provider_tree [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 633.316153] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Preparing fetch location {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 633.316885] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Creating directory with path [datastore1] vmware_temp/23ce2b0d-b163-461e-b55d-a018588b2a4c/2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 633.316885] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c104f8d8-5e5e-4f9e-95a1-5f91bce59670 tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Lock "c8779822-1694-463e-bd06-5f84d867d1bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.569s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 633.317484] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1018ad82-06c0-49c3-9169-87efb892bb70 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.333664] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Created directory with path [datastore1] vmware_temp/23ce2b0d-b163-461e-b55d-a018588b2a4c/2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 633.333664] env[62522]: DEBUG 
nova.virt.vmwareapi.vmops [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Fetch image to [datastore1] vmware_temp/23ce2b0d-b163-461e-b55d-a018588b2a4c/2ee4561b-ba48-4f45-82f6-eac89be98290/tmp-sparse.vmdk {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 633.334157] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Downloading image file data 2ee4561b-ba48-4f45-82f6-eac89be98290 to [datastore1] vmware_temp/23ce2b0d-b163-461e-b55d-a018588b2a4c/2ee4561b-ba48-4f45-82f6-eac89be98290/tmp-sparse.vmdk on the data store datastore1 {{(pid=62522) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 633.334646] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e54c530-23e3-4a8c-87f5-7ebd86d90cc0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.344586] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a170bdd4-be63-47d5-b184-03395d4246a4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.356455] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a862a1e-30e2-4af3-9273-6f7207bd74a5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.396577] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3f8b13d-6e18-4dee-9cdf-cc6a97666fdf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.404105] env[62522]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-645d1990-b7fc-45fc-9a67-490989a280ce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.439452] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Downloading image file data 2ee4561b-ba48-4f45-82f6-eac89be98290 to the data store datastore1 {{(pid=62522) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 633.521787] env[62522]: DEBUG oslo_vmware.rw_handles [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/23ce2b0d-b163-461e-b55d-a018588b2a4c/2ee4561b-ba48-4f45-82f6-eac89be98290/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62522) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 633.606387] env[62522]: DEBUG oslo_vmware.api [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2414948, 'name': Rename_Task, 'duration_secs': 0.207819} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.609652] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 633.609928] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-53023aa2-d920-4d84-aab1-faeb7fbbe3bf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.617521] env[62522]: DEBUG oslo_vmware.api [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Waiting for the task: (returnval){ [ 633.617521] env[62522]: value = "task-2414949" [ 633.617521] env[62522]: _type = "Task" [ 633.617521] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.628499] env[62522]: DEBUG oslo_vmware.api [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2414949, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.772886] env[62522]: DEBUG nova.network.neutron [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Updating instance_info_cache with network_info: [{"id": "d312748e-14f2-4467-bf2a-2f6479f774f0", "address": "fa:16:3e:a9:0d:2b", "network": {"id": "be69fd15-aa3c-4e6e-9334-57674f1f2d81", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-543389568-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13a5a5169d8345a7a88fef5ff0ecd26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd312748e-14", "ovs_interfaceid": "d312748e-14f2-4467-bf2a-2f6479f774f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.780955] env[62522]: DEBUG nova.scheduler.client.report [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 633.821648] env[62522]: DEBUG nova.compute.manager [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 633.971502] env[62522]: DEBUG nova.compute.manager [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 634.014976] env[62522]: DEBUG nova.virt.hardware [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 634.015353] env[62522]: DEBUG nova.virt.hardware [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 634.015391] env[62522]: DEBUG nova.virt.hardware [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 634.016652] env[62522]: DEBUG nova.virt.hardware [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 634.016652] env[62522]: DEBUG nova.virt.hardware [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 634.016652] env[62522]: DEBUG nova.virt.hardware [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 634.016652] env[62522]: DEBUG nova.virt.hardware [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 634.016652] env[62522]: DEBUG nova.virt.hardware [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 634.017054] env[62522]: DEBUG nova.virt.hardware [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 634.017054] env[62522]: DEBUG nova.virt.hardware [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 634.017054] env[62522]: DEBUG nova.virt.hardware [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 634.018438] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e204e7b4-732e-4316-8f22-ae3289131fea {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.031550] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a91d6929-95a4-49bb-83c0-0d6f1f9bfdf0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.060551] env[62522]: DEBUG nova.compute.manager [None req-aecce57e-5a9b-4cf0-8f3c-fd9f5f1400cf tempest-ServerDiagnosticsV248Test-1103873209 tempest-ServerDiagnosticsV248Test-1103873209-project-admin] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 634.061886] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c30bfc7-0ff8-4cb2-8777-41a94572bb35 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.073171] env[62522]: INFO nova.compute.manager [None req-aecce57e-5a9b-4cf0-8f3c-fd9f5f1400cf tempest-ServerDiagnosticsV248Test-1103873209 tempest-ServerDiagnosticsV248Test-1103873209-project-admin] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Retrieving diagnostics [ 634.074435] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f024b4a7-060f-4d33-be45-441b9386f9dc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.123186] env[62522]: DEBUG nova.network.neutron [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Successfully created port: a0e9b152-7b65-405a-8302-dc8561d06224 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 634.141534] env[62522]: DEBUG oslo_vmware.api [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2414949, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.203374] env[62522]: DEBUG oslo_vmware.rw_handles [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Completed reading data from the image iterator. {{(pid=62522) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 634.203624] env[62522]: DEBUG oslo_vmware.rw_handles [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/23ce2b0d-b163-461e-b55d-a018588b2a4c/2ee4561b-ba48-4f45-82f6-eac89be98290/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62522) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 634.226158] env[62522]: DEBUG oslo_concurrency.lockutils [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Acquiring lock "a804f755-58b2-4350-8726-4e82f60afcdc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.226442] env[62522]: DEBUG oslo_concurrency.lockutils [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Lock "a804f755-58b2-4350-8726-4e82f60afcdc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.276573] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Releasing lock "refresh_cache-74b6ae10-a595-4139-8eda-38fe1aa298cf" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 634.276890] env[62522]: DEBUG nova.compute.manager [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Instance network_info: |[{"id": "d312748e-14f2-4467-bf2a-2f6479f774f0", "address": "fa:16:3e:a9:0d:2b", "network": {"id": "be69fd15-aa3c-4e6e-9334-57674f1f2d81", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-543389568-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13a5a5169d8345a7a88fef5ff0ecd26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapd312748e-14", "ovs_interfaceid": "d312748e-14f2-4467-bf2a-2f6479f774f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 634.277638] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:0d:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abe48956-848a-4e1f-b1f1-a27baa5390b9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd312748e-14f2-4467-bf2a-2f6479f774f0', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 634.288971] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Creating folder: Project (13a5a5169d8345a7a88fef5ff0ecd26e). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 634.289640] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.352s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 634.290263] env[62522]: DEBUG nova.compute.manager [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 634.293525] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a63b741e-91e0-4eb7-980d-b3e4a188f8b6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.297098] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.777s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.302080] env[62522]: INFO nova.compute.claims [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 634.320843] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Created folder: Project (13a5a5169d8345a7a88fef5ff0ecd26e) in parent group-v489562. 
[ 634.321139] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Creating folder: Instances. Parent ref: group-v489572. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 634.321445] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6caa9d13-7e10-4e9f-9036-bc984733b124 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.333536] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Created folder: Instances in parent group-v489572. [ 634.333802] env[62522]: DEBUG oslo.service.loopingcall [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 634.334597] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 634.334597] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f060622-d157-4230-b1a0-1b16993b7834 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.351532] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Downloaded image file data 2ee4561b-ba48-4f45-82f6-eac89be98290 to vmware_temp/23ce2b0d-b163-461e-b55d-a018588b2a4c/2ee4561b-ba48-4f45-82f6-eac89be98290/tmp-sparse.vmdk on the data store datastore1 {{(pid=62522) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 634.353499] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Caching image {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 634.353783] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Copying Virtual Disk [datastore1] vmware_temp/23ce2b0d-b163-461e-b55d-a018588b2a4c/2ee4561b-ba48-4f45-82f6-eac89be98290/tmp-sparse.vmdk to [datastore1] vmware_temp/23ce2b0d-b163-461e-b55d-a018588b2a4c/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 634.354060] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fcc100cc-c92a-4eff-b5b6-d69e7ab0e799 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.359615] env[62522]: DEBUG 
oslo_concurrency.lockutils [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.362755] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 634.362755] env[62522]: value = "task-2414952" [ 634.362755] env[62522]: _type = "Task" [ 634.362755] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.363521] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Waiting for the task: (returnval){ [ 634.363521] env[62522]: value = "task-2414953" [ 634.363521] env[62522]: _type = "Task" [ 634.363521] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.379778] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2414952, 'name': CreateVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.384049] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': task-2414953, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.519082] env[62522]: DEBUG nova.network.neutron [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Successfully updated port: fae6b6fe-00ac-409a-be5f-719500f98702 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 634.635814] env[62522]: DEBUG oslo_vmware.api [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2414949, 'name': PowerOnVM_Task, 'duration_secs': 0.711173} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.636139] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 634.636343] env[62522]: INFO nova.compute.manager [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Took 10.04 seconds to spawn the instance on the hypervisor. 
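The repeated "Task: {'id': task-..., ...} progress is N%" lines come from a poll loop that reads the vCenter task state until it reaches a terminal state, then records a duration. The sketch below shows that pattern only; poll_vc_task and get_task_info are hypothetical stand-ins, not oslo.vmware APIs (the real loop lives in oslo_vmware.api):

    # Illustrative poll loop for a vCenter-style task. get_task_info(task_id)
    # is a hypothetical callable returning a dict with 'state' and optional
    # 'progress'/'error' keys; in Nova this would be a SOAP property read.
    import time


    def poll_vc_task(get_task_info, task_id, interval=0.5):
        start = time.monotonic()
        while True:
            info = get_task_info(task_id)
            state = info['state']
            if state in ('queued', 'running'):
                print('Task %s progress is %s%%.' % (task_id, info.get('progress', 0)))
            elif state == 'success':
                info['duration_secs'] = round(time.monotonic() - start, 6)
                print('Task %s completed successfully in %ss.'
                      % (task_id, info['duration_secs']))
                return info
            else:
                raise RuntimeError('Task %s failed: %s' % (task_id, info.get('error')))
            time.sleep(interval)


    # Tiny demo with a fake task source so the sketch runs standalone.
    if __name__ == '__main__':
        states = iter([{'state': 'running', 'progress': 6},
                       {'state': 'running', 'progress': 66},
                       {'state': 'success'}])
        poll_vc_task(lambda task_id: next(states), 'task-2414952', interval=0.01)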
[ 634.636518] env[62522]: DEBUG nova.compute.manager [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 634.637346] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f77e73b3-0239-4654-8162-b354ded48351 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.690993] env[62522]: DEBUG nova.compute.manager [req-5b7bcdc3-e023-41f0-8bbb-b5a9ed7faf7c req-24bcb868-d2bd-44d3-9e5f-3e12818248b8 service nova] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Received event network-vif-plugged-d312748e-14f2-4467-bf2a-2f6479f774f0 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 634.691312] env[62522]: DEBUG oslo_concurrency.lockutils [req-5b7bcdc3-e023-41f0-8bbb-b5a9ed7faf7c req-24bcb868-d2bd-44d3-9e5f-3e12818248b8 service nova] Acquiring lock "74b6ae10-a595-4139-8eda-38fe1aa298cf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.691373] env[62522]: DEBUG oslo_concurrency.lockutils [req-5b7bcdc3-e023-41f0-8bbb-b5a9ed7faf7c req-24bcb868-d2bd-44d3-9e5f-3e12818248b8 service nova] Lock "74b6ae10-a595-4139-8eda-38fe1aa298cf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.691483] env[62522]: DEBUG oslo_concurrency.lockutils [req-5b7bcdc3-e023-41f0-8bbb-b5a9ed7faf7c req-24bcb868-d2bd-44d3-9e5f-3e12818248b8 service nova] Lock "74b6ae10-a595-4139-8eda-38fe1aa298cf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 634.692027] env[62522]: DEBUG nova.compute.manager [req-5b7bcdc3-e023-41f0-8bbb-b5a9ed7faf7c req-24bcb868-d2bd-44d3-9e5f-3e12818248b8 service nova] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] No waiting events found dispatching network-vif-plugged-d312748e-14f2-4467-bf2a-2f6479f774f0 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 634.692027] env[62522]: WARNING nova.compute.manager [req-5b7bcdc3-e023-41f0-8bbb-b5a9ed7faf7c req-24bcb868-d2bd-44d3-9e5f-3e12818248b8 service nova] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Received unexpected event network-vif-plugged-d312748e-14f2-4467-bf2a-2f6479f774f0 for instance with vm_state building and task_state spawning. 
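The network-vif-plugged / "No waiting events found" / "Received unexpected event" sequence above reflects a registry of expected per-instance events: the spawn path registers the events it intends to wait for, incoming Neutron notifications complete them, and an event nobody registered for is logged as unexpected. A simplified, hypothetical sketch of that bookkeeping (not the real nova.compute.manager.InstanceEvents):

    # Simplified stand-in for the waiting-events registry suggested by the
    # pop_instance_event lines; names and structure are illustrative only.
    import threading


    class InstanceEvents(object):
        def __init__(self):
            self._lock = threading.Lock()
            self._events = {}   # (instance_uuid, event_name) -> threading.Event

        def prepare_for_event(self, instance_uuid, event_name):
            ev = threading.Event()
            with self._lock:
                self._events[(instance_uuid, event_name)] = ev
            return ev

        def pop_instance_event(self, instance_uuid, event_name):
            with self._lock:
                return self._events.pop((instance_uuid, event_name), None)


    def external_instance_event(events, instance_uuid, event_name):
        # Handler for an event pushed by Neutron via the Nova API.
        waiter = events.pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            # Matches the WARNING in the log: nobody registered for this event yet.
            print('WARNING: unexpected event %s for instance %s'
                  % (event_name, instance_uuid))
        else:
            waiter.set()        # wake the spawn path blocked on waiter.wait()


    if __name__ == '__main__':
        events = InstanceEvents()
        uuid = '74b6ae10-a595-4139-8eda-38fe1aa298cf'
        # Event arrives before spawn registered interest -> "unexpected".
        external_instance_event(events, uuid, 'network-vif-plugged-d312748e')
        # Normal flow: register first, then the event completes the wait.
        waiter = events.prepare_for_event(uuid, 'network-vif-plugged-d312748e')
        external_instance_event(events, uuid, 'network-vif-plugged-d312748e')
        print('plug seen:', waiter.wait(timeout=1))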
[ 634.692027] env[62522]: DEBUG nova.compute.manager [req-5b7bcdc3-e023-41f0-8bbb-b5a9ed7faf7c req-24bcb868-d2bd-44d3-9e5f-3e12818248b8 service nova] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Received event network-changed-d312748e-14f2-4467-bf2a-2f6479f774f0 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 634.693417] env[62522]: DEBUG nova.compute.manager [req-5b7bcdc3-e023-41f0-8bbb-b5a9ed7faf7c req-24bcb868-d2bd-44d3-9e5f-3e12818248b8 service nova] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Refreshing instance network info cache due to event network-changed-d312748e-14f2-4467-bf2a-2f6479f774f0. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 634.696406] env[62522]: DEBUG oslo_concurrency.lockutils [req-5b7bcdc3-e023-41f0-8bbb-b5a9ed7faf7c req-24bcb868-d2bd-44d3-9e5f-3e12818248b8 service nova] Acquiring lock "refresh_cache-74b6ae10-a595-4139-8eda-38fe1aa298cf" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.696715] env[62522]: DEBUG oslo_concurrency.lockutils [req-5b7bcdc3-e023-41f0-8bbb-b5a9ed7faf7c req-24bcb868-d2bd-44d3-9e5f-3e12818248b8 service nova] Acquired lock "refresh_cache-74b6ae10-a595-4139-8eda-38fe1aa298cf" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.696772] env[62522]: DEBUG nova.network.neutron [req-5b7bcdc3-e023-41f0-8bbb-b5a9ed7faf7c req-24bcb868-d2bd-44d3-9e5f-3e12818248b8 service nova] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Refreshing network info cache for port d312748e-14f2-4467-bf2a-2f6479f774f0 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 634.798346] env[62522]: DEBUG nova.compute.utils [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 634.801744] env[62522]: DEBUG nova.compute.manager [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 634.801744] env[62522]: DEBUG nova.network.neutron [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 634.837728] env[62522]: DEBUG nova.compute.manager [req-81589125-b916-4e65-986c-e578eede4e9e req-dc7abcea-d02f-4054-aebf-6129d12e8d2e service nova] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Received event network-changed-7268d88d-66d8-4214-a46c-9f03f18f95cb {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 634.837728] env[62522]: DEBUG nova.compute.manager [req-81589125-b916-4e65-986c-e578eede4e9e req-dc7abcea-d02f-4054-aebf-6129d12e8d2e service nova] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Refreshing instance network info cache due to event network-changed-7268d88d-66d8-4214-a46c-9f03f18f95cb. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 634.837728] env[62522]: DEBUG oslo_concurrency.lockutils [req-81589125-b916-4e65-986c-e578eede4e9e req-dc7abcea-d02f-4054-aebf-6129d12e8d2e service nova] Acquiring lock "refresh_cache-7828f9c8-fc02-4218-ba93-5362af807dad" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.837728] env[62522]: DEBUG oslo_concurrency.lockutils [req-81589125-b916-4e65-986c-e578eede4e9e req-dc7abcea-d02f-4054-aebf-6129d12e8d2e service nova] Acquired lock "refresh_cache-7828f9c8-fc02-4218-ba93-5362af807dad" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.837728] env[62522]: DEBUG nova.network.neutron [req-81589125-b916-4e65-986c-e578eede4e9e req-dc7abcea-d02f-4054-aebf-6129d12e8d2e service nova] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Refreshing network info cache for port 7268d88d-66d8-4214-a46c-9f03f18f95cb {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 634.893834] env[62522]: DEBUG nova.policy [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '99dedc7250f54175ab7d07396f89a248', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '209afab6e4c840fd8ae644e8d4d90fbe', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 634.895459] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2414952, 'name': CreateVM_Task, 'duration_secs': 0.343368} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.895770] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': task-2414953, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.896305] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 634.897602] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.897602] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.897824] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 634.898549] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70d4ef30-3df1-488f-977b-a458917af8dd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.903762] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 634.903762] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52773f36-4961-3034-ec0e-9bed4d534920" [ 634.903762] env[62522]: _type = "Task" [ 634.903762] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.915240] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52773f36-4961-3034-ec0e-9bed4d534920, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.022722] env[62522]: DEBUG oslo_concurrency.lockutils [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Acquiring lock "refresh_cache-758ed671-347a-4949-9842-2f8cdcd261ae" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 635.022916] env[62522]: DEBUG oslo_concurrency.lockutils [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Acquired lock "refresh_cache-758ed671-347a-4949-9842-2f8cdcd261ae" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.023097] env[62522]: DEBUG nova.network.neutron [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 635.122752] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Acquiring lock "95e4fe36-6830-4fc4-bb53-1e5643c2f95b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 635.122752] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Lock "95e4fe36-6830-4fc4-bb53-1e5643c2f95b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.159860] env[62522]: INFO nova.compute.manager [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Took 14.92 seconds to build instance. [ 635.308706] env[62522]: DEBUG nova.compute.manager [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 635.389027] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': task-2414953, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.837398} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.389027] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Copied Virtual Disk [datastore1] vmware_temp/23ce2b0d-b163-461e-b55d-a018588b2a4c/2ee4561b-ba48-4f45-82f6-eac89be98290/tmp-sparse.vmdk to [datastore1] vmware_temp/23ce2b0d-b163-461e-b55d-a018588b2a4c/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 635.389027] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Deleting the datastore file [datastore1] vmware_temp/23ce2b0d-b163-461e-b55d-a018588b2a4c/2ee4561b-ba48-4f45-82f6-eac89be98290/tmp-sparse.vmdk {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 635.389027] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4eca5db-5c69-4f55-a6c1-a1ffebe1e7ca {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.396050] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Waiting for the task: (returnval){ [ 635.396050] env[62522]: value = "task-2414954" [ 635.396050] env[62522]: _type = "Task" [ 635.396050] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.429286] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': task-2414954, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.439486] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.439912] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 635.440339] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 635.633889] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd129dd0-9018-415f-a12e-d96bed8c1496 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.645884] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-484d77c9-09b6-48a7-8317-12d96cf2d4e2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.689443] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9379d07d-4b76-4a69-aecd-b85860c16a3f tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Lock "a3830103-2dcb-40ac-8e62-b331fe4673ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.459s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 635.691038] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a5d2cef-6b8e-481b-900a-b77bc76684e2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.699619] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4020c6db-1213-4b90-a2c0-c7684f7a0093 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.715702] env[62522]: DEBUG nova.compute.provider_tree [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 635.771575] env[62522]: DEBUG nova.network.neutron [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 635.911138] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': task-2414954, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.048067} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.911400] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 635.911591] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Moving file from [datastore1] vmware_temp/23ce2b0d-b163-461e-b55d-a018588b2a4c/2ee4561b-ba48-4f45-82f6-eac89be98290 to [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290. {{(pid=62522) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 635.912141] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-4ee30dfe-af4d-47ed-9a61-3b66684d5bc7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.918773] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Waiting for the task: (returnval){ [ 635.918773] env[62522]: value = "task-2414955" [ 635.918773] env[62522]: _type = "Task" [ 635.918773] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.928748] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': task-2414955, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.967439] env[62522]: DEBUG nova.network.neutron [req-5b7bcdc3-e023-41f0-8bbb-b5a9ed7faf7c req-24bcb868-d2bd-44d3-9e5f-3e12818248b8 service nova] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Updated VIF entry in instance network info cache for port d312748e-14f2-4467-bf2a-2f6479f774f0. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 635.967913] env[62522]: DEBUG nova.network.neutron [req-5b7bcdc3-e023-41f0-8bbb-b5a9ed7faf7c req-24bcb868-d2bd-44d3-9e5f-3e12818248b8 service nova] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Updating instance_info_cache with network_info: [{"id": "d312748e-14f2-4467-bf2a-2f6479f774f0", "address": "fa:16:3e:a9:0d:2b", "network": {"id": "be69fd15-aa3c-4e6e-9334-57674f1f2d81", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-543389568-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13a5a5169d8345a7a88fef5ff0ecd26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd312748e-14", "ovs_interfaceid": "d312748e-14f2-4467-bf2a-2f6479f774f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.154681] env[62522]: DEBUG nova.network.neutron [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Successfully created port: 20713b6e-4b87-4065-a83b-f62812551cd5 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 636.196356] env[62522]: DEBUG nova.compute.manager [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 636.255335] env[62522]: ERROR nova.scheduler.client.report [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [req-3916c123-6a49-4808-b810-59c538a28548] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c7fa38b2-245d-4337-a012-22c1a01c0a72. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3916c123-6a49-4808-b810-59c538a28548"}]} [ 636.277235] env[62522]: DEBUG nova.scheduler.client.report [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Refreshing inventories for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 636.298615] env[62522]: DEBUG nova.scheduler.client.report [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Updating ProviderTree inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 636.299279] env[62522]: DEBUG nova.compute.provider_tree [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 636.313597] env[62522]: DEBUG nova.scheduler.client.report [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Refreshing aggregate associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, aggregates: None {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 636.322333] env[62522]: DEBUG nova.network.neutron [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Updating instance_info_cache with network_info: [{"id": "fae6b6fe-00ac-409a-be5f-719500f98702", "address": "fa:16:3e:3d:b2:9f", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.51", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": 
"l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfae6b6fe-00", "ovs_interfaceid": "fae6b6fe-00ac-409a-be5f-719500f98702", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.328329] env[62522]: DEBUG nova.compute.manager [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 636.350190] env[62522]: DEBUG nova.scheduler.client.report [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Refreshing trait associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 636.356420] env[62522]: DEBUG nova.network.neutron [req-81589125-b916-4e65-986c-e578eede4e9e req-dc7abcea-d02f-4054-aebf-6129d12e8d2e service nova] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Updated VIF entry in instance network info cache for port 7268d88d-66d8-4214-a46c-9f03f18f95cb. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 636.356420] env[62522]: DEBUG nova.network.neutron [req-81589125-b916-4e65-986c-e578eede4e9e req-dc7abcea-d02f-4054-aebf-6129d12e8d2e service nova] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Updating instance_info_cache with network_info: [{"id": "7268d88d-66d8-4214-a46c-9f03f18f95cb", "address": "fa:16:3e:67:a8:1d", "network": {"id": "e8f2a03e-de08-467a-afd9-367a46aa3303", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1770276349-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6ea04e641514ce28bb1366da528ac2b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "136c3499-9ca0-4f85-903d-1f194aa66ed9", "external-id": "nsx-vlan-transportzone-307", "segmentation_id": 307, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7268d88d-66", "ovs_interfaceid": "7268d88d-66d8-4214-a46c-9f03f18f95cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.366780] env[62522]: DEBUG nova.virt.hardware [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 
tempest-ServerExternalEventsTest-202892512-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 636.366780] env[62522]: DEBUG nova.virt.hardware [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 636.366780] env[62522]: DEBUG nova.virt.hardware [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 636.366943] env[62522]: DEBUG nova.virt.hardware [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 636.366943] env[62522]: DEBUG nova.virt.hardware [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 636.367860] env[62522]: DEBUG nova.virt.hardware [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 636.367860] env[62522]: DEBUG nova.virt.hardware [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 636.367860] env[62522]: DEBUG nova.virt.hardware [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 636.368146] env[62522]: DEBUG nova.virt.hardware [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Got 1 possible topologies {{(pid=62522) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 636.368349] env[62522]: DEBUG nova.virt.hardware [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 636.370169] env[62522]: DEBUG nova.virt.hardware [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 636.371622] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-609b9524-c1c2-4c1d-b586-4250fdee97fd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.385350] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c387cb-da67-41c0-84bd-90df8dcf8c5d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.432144] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': task-2414955, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.047633} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.432366] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] File moved {{(pid=62522) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 636.432499] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Cleaning up location [datastore1] vmware_temp/23ce2b0d-b163-461e-b55d-a018588b2a4c {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 636.432657] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Deleting the datastore file [datastore1] vmware_temp/23ce2b0d-b163-461e-b55d-a018588b2a4c {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 636.432898] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f04d61ba-d783-44bf-8b2e-64a8dbdb5c73 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.439825] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Waiting for the task: (returnval){ [ 636.439825] env[62522]: value = "task-2414956" [ 636.439825] env[62522]: _type = "Task" [ 636.439825] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.447798] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': task-2414956, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.470668] env[62522]: DEBUG oslo_concurrency.lockutils [req-5b7bcdc3-e023-41f0-8bbb-b5a9ed7faf7c req-24bcb868-d2bd-44d3-9e5f-3e12818248b8 service nova] Releasing lock "refresh_cache-74b6ae10-a595-4139-8eda-38fe1aa298cf" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.554554] env[62522]: DEBUG nova.network.neutron [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Successfully updated port: a0e9b152-7b65-405a-8302-dc8561d06224 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 636.579291] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4a29075-de74-4673-9e8b-4abeddf74d41 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.588347] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e82b44f-44ec-480c-8cab-05073d28c4d4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.620377] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c6c1c80-d638-42fe-a7cd-1f2fd1a6110e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.629710] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35825e83-ed3e-4f21-919a-4422fc728428 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.644018] env[62522]: DEBUG nova.compute.provider_tree [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 636.726247] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.825772] env[62522]: DEBUG oslo_concurrency.lockutils [None 
req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Releasing lock "refresh_cache-758ed671-347a-4949-9842-2f8cdcd261ae" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.826093] env[62522]: DEBUG nova.compute.manager [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Instance network_info: |[{"id": "fae6b6fe-00ac-409a-be5f-719500f98702", "address": "fa:16:3e:3d:b2:9f", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.51", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfae6b6fe-00", "ovs_interfaceid": "fae6b6fe-00ac-409a-be5f-719500f98702", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 636.826444] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:b2:9f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f6d1427-d86b-4371-9172-50e4bb0eb1cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fae6b6fe-00ac-409a-be5f-719500f98702', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 636.836926] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Creating folder: Project (c7a75d2c29c4446da2662a4c70328003). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 636.837405] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2761a0d5-0812-43b0-b519-c06c5f4ea802 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.852129] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Created folder: Project (c7a75d2c29c4446da2662a4c70328003) in parent group-v489562. 
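(Aside: the nova.virt.hardware entries earlier in this segment show how the CPU topology for the 1-vCPU m1.nano flavor gets chosen: flavor and image limits and preferences are all 0:0:0, the limits therefore default to 65536 per dimension, and the only candidate topology for one vCPU is sockets=1, cores=1, threads=1. The snippet below is a minimal, self-contained Python sketch of that candidate enumeration; the helper name, defaults, and namedtuple are illustrative assumptions, not Nova's actual _get_possible_cpu_topologies code.)

# Illustrative sketch of CPU-topology candidate selection (hypothetical helper,
# not nova.virt.hardware itself).
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield every sockets/cores/threads combination whose product equals the
    vCPU count and which stays within the per-dimension limits."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // sockets // cores
            if threads <= max_threads:
                yield VirtCPUTopology(sockets, cores, threads)

# For the 1-vCPU m1.nano flavor in the log the only candidate is 1:1:1,
# matching "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
print(list(possible_topologies(1)))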
[ 636.852374] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Creating folder: Instances. Parent ref: group-v489575. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 636.852624] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2c3708f0-fb5f-4fc1-9ea5-c1c262b7a629 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.860461] env[62522]: DEBUG oslo_concurrency.lockutils [req-81589125-b916-4e65-986c-e578eede4e9e req-dc7abcea-d02f-4054-aebf-6129d12e8d2e service nova] Releasing lock "refresh_cache-7828f9c8-fc02-4218-ba93-5362af807dad" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.862314] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Created folder: Instances in parent group-v489575. [ 636.862552] env[62522]: DEBUG oslo.service.loopingcall [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 636.862748] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 636.862954] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6593d8d0-c094-435f-832d-4ca4386baa5a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.883706] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 636.883706] env[62522]: value = "task-2414959" [ 636.883706] env[62522]: _type = "Task" [ 636.883706] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.893944] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2414959, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.953320] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': task-2414956, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.062976} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.953320] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 636.953320] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26e27e89-3f7e-4596-98b9-b765d89e3eab {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.960046] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Waiting for the task: (returnval){ [ 636.960046] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526a062a-bea1-4661-e1a4-151a8c8284c4" [ 636.960046] env[62522]: _type = "Task" [ 636.960046] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.969691] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526a062a-bea1-4661-e1a4-151a8c8284c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.028325] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Acquiring lock "4de70165-c28f-44b7-a01a-caa0787170b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.028325] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Lock "4de70165-c28f-44b7-a01a-caa0787170b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.057255] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Acquiring lock "refresh_cache-3824a70e-8498-410a-904d-c7cd0de0c358" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 637.057851] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Acquired lock "refresh_cache-3824a70e-8498-410a-904d-c7cd0de0c358" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.057851] env[62522]: DEBUG nova.network.neutron [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 
tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 637.182634] env[62522]: ERROR nova.scheduler.client.report [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [req-db0dd644-9853-49e1-8358-e3b8649819ef] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c7fa38b2-245d-4337-a012-22c1a01c0a72. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-db0dd644-9853-49e1-8358-e3b8649819ef"}]} [ 637.213522] env[62522]: DEBUG nova.scheduler.client.report [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Refreshing inventories for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 637.247134] env[62522]: DEBUG nova.scheduler.client.report [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Updating ProviderTree inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 637.247134] env[62522]: DEBUG nova.compute.provider_tree [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 637.261326] env[62522]: DEBUG nova.scheduler.client.report [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Refreshing aggregate associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, aggregates: None {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 
637.284543] env[62522]: DEBUG nova.scheduler.client.report [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Refreshing trait associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 637.398011] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2414959, 'name': CreateVM_Task, 'duration_secs': 0.350625} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.398251] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 637.398926] env[62522]: DEBUG oslo_concurrency.lockutils [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 637.399106] env[62522]: DEBUG oslo_concurrency.lockutils [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.399415] env[62522]: DEBUG oslo_concurrency.lockutils [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 637.399664] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e2af098-f0f2-43e0-be48-1c30d1cda457 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.407816] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Waiting for the task: (returnval){ [ 637.407816] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a19f6c-930d-41e8-40ea-21bf13205995" [ 637.407816] env[62522]: _type = "Task" [ 637.407816] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.416950] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a19f6c-930d-41e8-40ea-21bf13205995, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.473373] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526a062a-bea1-4661-e1a4-151a8c8284c4, 'name': SearchDatastore_Task, 'duration_secs': 0.010966} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.473793] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 637.474394] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 7828f9c8-fc02-4218-ba93-5362af807dad/7828f9c8-fc02-4218-ba93-5362af807dad.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 637.477426] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.477693] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 637.478011] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3c007489-598f-4698-b6f1-7cc4fb342164 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.480361] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-64a49966-b18f-4198-9201-a1d451dede6f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.488777] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Waiting for the task: (returnval){ [ 637.488777] env[62522]: value = "task-2414960" [ 637.488777] env[62522]: _type = "Task" [ 637.488777] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.493777] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 637.494048] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 637.498436] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-efa2e54a-c72d-46e1-9ff5-cc39e1f1158b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.507024] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': task-2414960, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.508895] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 637.508895] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520aad80-cb3f-5018-4657-8c12293e57e3" [ 637.508895] env[62522]: _type = "Task" [ 637.508895] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.518863] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520aad80-cb3f-5018-4657-8c12293e57e3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.594311] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5dc745-5c4c-432d-bbe9-240bd6c30d98 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.603693] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a033e9cf-215b-4d8f-a481-c3890a9795a2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.642172] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf07f56e-83e0-4ee3-bff0-cc7efa0b532b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.653886] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b07f4f-f510-45a3-a9da-0f0cf619c746 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.672077] env[62522]: DEBUG nova.compute.provider_tree [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 637.767212] env[62522]: DEBUG nova.network.neutron [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 637.834031] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Acquiring lock "c73686c6-4dd8-4f00-a65a-5d8574409ad1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.834552] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Lock "c73686c6-4dd8-4f00-a65a-5d8574409ad1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.867127] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Acquiring lock "7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.867127] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Lock "7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.923837] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a19f6c-930d-41e8-40ea-21bf13205995, 'name': SearchDatastore_Task, 'duration_secs': 0.013653} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.924311] env[62522]: DEBUG oslo_concurrency.lockutils [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 637.924646] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 637.924909] env[62522]: DEBUG oslo_concurrency.lockutils [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 638.003558] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': task-2414960, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.021916] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520aad80-cb3f-5018-4657-8c12293e57e3, 'name': SearchDatastore_Task, 'duration_secs': 0.009658} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.023450] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a51bf2e-1117-4f77-a40d-27a1bde9d914 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.030213] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 638.030213] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5245f4be-c2b6-42d0-f939-8c127d0039c2" [ 638.030213] env[62522]: _type = "Task" [ 638.030213] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.039165] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5245f4be-c2b6-42d0-f939-8c127d0039c2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.198918] env[62522]: ERROR nova.scheduler.client.report [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [req-f7833d8b-2b29-4559-91d0-e179c6a44c45] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c7fa38b2-245d-4337-a012-22c1a01c0a72. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f7833d8b-2b29-4559-91d0-e179c6a44c45"}]} [ 638.216132] env[62522]: DEBUG nova.scheduler.client.report [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Refreshing inventories for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 638.233106] env[62522]: DEBUG nova.scheduler.client.report [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Updating ProviderTree inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 638.233106] env[62522]: DEBUG nova.compute.provider_tree [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 638.246792] env[62522]: DEBUG nova.scheduler.client.report [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Refreshing aggregate associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, aggregates: None {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 638.272334] env[62522]: DEBUG nova.scheduler.client.report [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 
tempest-ServersAdminTestJSON-2113526319-project-member] Refreshing trait associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 638.340879] env[62522]: DEBUG nova.network.neutron [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Updating instance_info_cache with network_info: [{"id": "a0e9b152-7b65-405a-8302-dc8561d06224", "address": "fa:16:3e:e0:65:aa", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0e9b152-7b", "ovs_interfaceid": "a0e9b152-7b65-405a-8302-dc8561d06224", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.502424] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': task-2414960, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.590629} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.502767] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 7828f9c8-fc02-4218-ba93-5362af807dad/7828f9c8-fc02-4218-ba93-5362af807dad.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 638.503017] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 638.503292] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a3f79969-e2c8-467f-bee3-5e645d24912a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.509465] env[62522]: DEBUG nova.network.neutron [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Successfully updated port: 20713b6e-4b87-4065-a83b-f62812551cd5 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 638.515743] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Waiting for the task: (returnval){ [ 638.515743] env[62522]: value = "task-2414961" [ 638.515743] env[62522]: _type = "Task" [ 638.515743] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.529785] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': task-2414961, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.542745] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5245f4be-c2b6-42d0-f939-8c127d0039c2, 'name': SearchDatastore_Task, 'duration_secs': 0.040617} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.543133] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 638.543510] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 74b6ae10-a595-4139-8eda-38fe1aa298cf/74b6ae10-a595-4139-8eda-38fe1aa298cf.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 638.543734] env[62522]: DEBUG oslo_concurrency.lockutils [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.544012] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 638.544261] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2950be8b-2be4-4fe7-9d1c-c1538905ede7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.548949] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e421da3b-989d-48a6-af63-74f624108bdf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.556301] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 638.556301] env[62522]: value = "task-2414962" [ 638.556301] env[62522]: _type = "Task" [ 638.556301] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.567106] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 638.567367] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 638.573089] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed807dad-96e3-468d-93bf-e7ae5af23909 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.579327] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2414962, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.582868] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Waiting for the task: (returnval){ [ 638.582868] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f36cb7-8035-1dd0-9fbc-03e0dd47b3ff" [ 638.582868] env[62522]: _type = "Task" [ 638.582868] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.590861] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f36cb7-8035-1dd0-9fbc-03e0dd47b3ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.603668] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0446463-0671-4969-bcb3-0d3b78d980d7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.612497] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f661eb0-02d5-41d2-a10e-f6b97029898e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.645737] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-759031f4-870b-41f0-8f5e-6b8293bbd5ce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.660237] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc98458-e5a7-4a10-a875-4feb11012567 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.672026] env[62522]: DEBUG nova.compute.provider_tree [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 638.849384] 
env[62522]: DEBUG oslo_concurrency.lockutils [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Releasing lock "refresh_cache-3824a70e-8498-410a-904d-c7cd0de0c358" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 638.849716] env[62522]: DEBUG nova.compute.manager [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Instance network_info: |[{"id": "a0e9b152-7b65-405a-8302-dc8561d06224", "address": "fa:16:3e:e0:65:aa", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0e9b152-7b", "ovs_interfaceid": "a0e9b152-7b65-405a-8302-dc8561d06224", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 638.850708] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:65:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f6d1427-d86b-4371-9172-50e4bb0eb1cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a0e9b152-7b65-405a-8302-dc8561d06224', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 638.861570] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Creating folder: Project (ca08d150df0147b29b30fb57739c7a6e). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 638.861570] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9b91f5c5-7b31-4118-b795-7f3ef47439fb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.870994] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Created folder: Project (ca08d150df0147b29b30fb57739c7a6e) in parent group-v489562. 
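(Aside: the nova.scheduler.client.report entries above show the expected handling of a placement.concurrent_update conflict: the inventory PUT carries the resource-provider generation, placement answers 409 when another writer has bumped that generation, and the report client refreshes inventories, aggregates and traits before retrying. The snippet below is a simplified refresh-and-retry loop using the requests library; the base_url, token handling and helper name are assumptions for illustration, not the report client's real implementation.)

# Simplified sketch of retrying an inventory update after a generation conflict
# (illustrative only; auth headers and microversion negotiation are omitted).
import requests

def put_inventory(base_url, rp_uuid, inventories, session=None, max_retries=3):
    """PUT inventory for a resource provider, re-reading the provider generation
    and retrying when placement answers 409 placement.concurrent_update."""
    s = session or requests.Session()
    for _ in range(max_retries):
        # Re-read the provider to pick up the current generation.
        rp = s.get(f"{base_url}/resource_providers/{rp_uuid}").json()
        body = {"resource_provider_generation": rp["generation"],
                "inventories": inventories}
        resp = s.put(f"{base_url}/resource_providers/{rp_uuid}/inventories", json=body)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409: another writer bumped the generation; loop, refresh, retry.
    raise RuntimeError("inventory update kept conflicting; giving up")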
[ 638.871204] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Creating folder: Instances. Parent ref: group-v489578. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 638.871941] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e8133957-42c1-43a6-bf59-b62b027378bf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.882936] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Created folder: Instances in parent group-v489578. [ 638.883838] env[62522]: DEBUG oslo.service.loopingcall [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 638.884135] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 638.884592] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fd96d256-ee7a-46ca-92ad-aee14095e658 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.906824] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 638.906824] env[62522]: value = "task-2414965" [ 638.906824] env[62522]: _type = "Task" [ 638.906824] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.918026] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2414965, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.013147] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Acquiring lock "refresh_cache-678b6b5f-b410-4c55-872e-4a74da6d7ebc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 639.013147] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Acquired lock "refresh_cache-678b6b5f-b410-4c55-872e-4a74da6d7ebc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.013147] env[62522]: DEBUG nova.network.neutron [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 639.028940] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': task-2414961, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.450894} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.028940] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 639.029624] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f76b1d-92c9-452b-a618-65c9f66f5a21 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.054913] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 7828f9c8-fc02-4218-ba93-5362af807dad/7828f9c8-fc02-4218-ba93-5362af807dad.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 639.055506] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8291b717-41e9-4ab9-a47e-83fc4d1154ff {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.080150] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2414962, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.082455] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Waiting for the task: (returnval){ [ 639.082455] env[62522]: value = "task-2414966" [ 639.082455] env[62522]: _type = "Task" [ 639.082455] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.107552] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': task-2414966, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.108364] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f36cb7-8035-1dd0-9fbc-03e0dd47b3ff, 'name': SearchDatastore_Task, 'duration_secs': 0.030868} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.108844] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8820b2d-9e30-4776-8375-afb84f71a13b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.115688] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Waiting for the task: (returnval){ [ 639.115688] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d061bc-dc7c-89f6-a6c9-17cb455cd1a8" [ 639.115688] env[62522]: _type = "Task" [ 639.115688] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.122614] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d061bc-dc7c-89f6-a6c9-17cb455cd1a8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.149306] env[62522]: DEBUG nova.compute.manager [req-87715d07-a8df-4c06-be4e-ab8739e9a93d req-7a0a18e9-5b1a-41fb-8d58-940e062c667c service nova] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Received event network-vif-plugged-a0e9b152-7b65-405a-8302-dc8561d06224 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 639.149306] env[62522]: DEBUG oslo_concurrency.lockutils [req-87715d07-a8df-4c06-be4e-ab8739e9a93d req-7a0a18e9-5b1a-41fb-8d58-940e062c667c service nova] Acquiring lock "3824a70e-8498-410a-904d-c7cd0de0c358-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.149306] env[62522]: DEBUG oslo_concurrency.lockutils [req-87715d07-a8df-4c06-be4e-ab8739e9a93d req-7a0a18e9-5b1a-41fb-8d58-940e062c667c service nova] Lock "3824a70e-8498-410a-904d-c7cd0de0c358-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.149306] env[62522]: DEBUG oslo_concurrency.lockutils [req-87715d07-a8df-4c06-be4e-ab8739e9a93d req-7a0a18e9-5b1a-41fb-8d58-940e062c667c service nova] Lock "3824a70e-8498-410a-904d-c7cd0de0c358-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.149306] env[62522]: DEBUG nova.compute.manager [req-87715d07-a8df-4c06-be4e-ab8739e9a93d req-7a0a18e9-5b1a-41fb-8d58-940e062c667c service nova] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] No waiting events found dispatching network-vif-plugged-a0e9b152-7b65-405a-8302-dc8561d06224 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 639.149622] env[62522]: WARNING nova.compute.manager [req-87715d07-a8df-4c06-be4e-ab8739e9a93d req-7a0a18e9-5b1a-41fb-8d58-940e062c667c service nova] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Received unexpected event network-vif-plugged-a0e9b152-7b65-405a-8302-dc8561d06224 for instance with vm_state building and task_state spawning. [ 639.149622] env[62522]: DEBUG nova.compute.manager [req-87715d07-a8df-4c06-be4e-ab8739e9a93d req-7a0a18e9-5b1a-41fb-8d58-940e062c667c service nova] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Received event network-changed-a0e9b152-7b65-405a-8302-dc8561d06224 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 639.149622] env[62522]: DEBUG nova.compute.manager [req-87715d07-a8df-4c06-be4e-ab8739e9a93d req-7a0a18e9-5b1a-41fb-8d58-940e062c667c service nova] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Refreshing instance network info cache due to event network-changed-a0e9b152-7b65-405a-8302-dc8561d06224. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 639.149622] env[62522]: DEBUG oslo_concurrency.lockutils [req-87715d07-a8df-4c06-be4e-ab8739e9a93d req-7a0a18e9-5b1a-41fb-8d58-940e062c667c service nova] Acquiring lock "refresh_cache-3824a70e-8498-410a-904d-c7cd0de0c358" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 639.149915] env[62522]: DEBUG oslo_concurrency.lockutils [req-87715d07-a8df-4c06-be4e-ab8739e9a93d req-7a0a18e9-5b1a-41fb-8d58-940e062c667c service nova] Acquired lock "refresh_cache-3824a70e-8498-410a-904d-c7cd0de0c358" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.149915] env[62522]: DEBUG nova.network.neutron [req-87715d07-a8df-4c06-be4e-ab8739e9a93d req-7a0a18e9-5b1a-41fb-8d58-940e062c667c service nova] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Refreshing network info cache for port a0e9b152-7b65-405a-8302-dc8561d06224 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 639.152896] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] Acquiring lock "a3830103-2dcb-40ac-8e62-b331fe4673ff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.153096] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] Lock "a3830103-2dcb-40ac-8e62-b331fe4673ff" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.153285] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] Acquiring lock "a3830103-2dcb-40ac-8e62-b331fe4673ff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.153509] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] Lock "a3830103-2dcb-40ac-8e62-b331fe4673ff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.154264] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] Lock "a3830103-2dcb-40ac-8e62-b331fe4673ff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.155803] env[62522]: INFO nova.compute.manager [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e 
tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Terminating instance [ 639.211217] env[62522]: ERROR nova.scheduler.client.report [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [req-321574d4-8d02-4eb4-a8f1-af4e1836b2d4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c7fa38b2-245d-4337-a012-22c1a01c0a72. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-321574d4-8d02-4eb4-a8f1-af4e1836b2d4"}]} [ 639.211217] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.913s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.214146] env[62522]: ERROR nova.compute.manager [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] Failed to build and run instance: nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 (generation 21): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-321574d4-8d02-4eb4-a8f1-af4e1836b2d4"}]} [ 639.214146] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] Traceback (most recent call last): [ 639.214146] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 639.214146] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] with self.rt.instance_claim(context, instance, node, allocs, [ 639.214146] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 639.214146] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] return f(*args, **kwargs) [ 639.214146] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 215, in instance_claim [ 639.214146] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] self._update(elevated, cn) [ 639.214146] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] File 
"/opt/stack/nova/nova/compute/resource_tracker.py", line 1375, in _update [ 639.214707] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] self._update_to_placement(context, compute_node, startup) [ 639.214707] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 639.214707] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 639.214707] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 266, in call [ 639.214707] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] raise attempt.get() [ 639.214707] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 639.214707] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] six.reraise(self.value[0], self.value[1], self.value[2]) [ 639.214707] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 639.214707] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] raise value [ 639.214707] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 639.214707] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 639.214707] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1360, in _update_to_placement [ 639.214707] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] self.reportclient.update_from_provider_tree( [ 639.215196] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 639.215196] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] self.set_inventory_for_provider( [ 639.215196] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1007, in set_inventory_for_provider [ 639.215196] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] raise exception.ResourceProviderUpdateConflict( [ 639.215196] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 (generation 21): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-321574d4-8d02-4eb4-a8f1-af4e1836b2d4"}]} [ 639.215196] env[62522]: ERROR nova.compute.manager [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] 
[ 639.215196] env[62522]: DEBUG nova.compute.utils [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] A conflict was encountered attempting to update resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 (generation 21): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource pro {{(pid=62522) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 639.218191] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.095s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.220921] env[62522]: INFO nova.compute.claims [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 639.230741] env[62522]: DEBUG nova.compute.manager [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] Build of instance 433387e7-8de9-4cfb-9012-8652c65b5b97 was re-scheduled: A conflict was encountered attempting to update resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 (generation 21): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-321574d4-8d02-4eb4-a8f1-af4e1836b2d4"}]} {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 639.230741] env[62522]: DEBUG nova.compute.manager [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] Unplugging VIFs for instance {{(pid=62522) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 639.230741] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Acquiring lock "refresh_cache-433387e7-8de9-4cfb-9012-8652c65b5b97" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 639.230934] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Acquired lock "refresh_cache-433387e7-8de9-4cfb-9012-8652c65b5b97" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.231043] env[62522]: DEBUG nova.network.neutron [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] Building network info cache for instance 
{{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 639.261577] env[62522]: DEBUG nova.compute.manager [req-a0d30c29-5309-4e10-b6ff-301f66d0146b req-581da1b0-216d-4fdf-9d90-0394fe043fdf service nova] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Received event network-vif-plugged-fae6b6fe-00ac-409a-be5f-719500f98702 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 639.263140] env[62522]: DEBUG oslo_concurrency.lockutils [req-a0d30c29-5309-4e10-b6ff-301f66d0146b req-581da1b0-216d-4fdf-9d90-0394fe043fdf service nova] Acquiring lock "758ed671-347a-4949-9842-2f8cdcd261ae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.263347] env[62522]: DEBUG oslo_concurrency.lockutils [req-a0d30c29-5309-4e10-b6ff-301f66d0146b req-581da1b0-216d-4fdf-9d90-0394fe043fdf service nova] Lock "758ed671-347a-4949-9842-2f8cdcd261ae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.263548] env[62522]: DEBUG oslo_concurrency.lockutils [req-a0d30c29-5309-4e10-b6ff-301f66d0146b req-581da1b0-216d-4fdf-9d90-0394fe043fdf service nova] Lock "758ed671-347a-4949-9842-2f8cdcd261ae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.264882] env[62522]: DEBUG nova.compute.manager [req-a0d30c29-5309-4e10-b6ff-301f66d0146b req-581da1b0-216d-4fdf-9d90-0394fe043fdf service nova] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] No waiting events found dispatching network-vif-plugged-fae6b6fe-00ac-409a-be5f-719500f98702 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 639.265203] env[62522]: WARNING nova.compute.manager [req-a0d30c29-5309-4e10-b6ff-301f66d0146b req-581da1b0-216d-4fdf-9d90-0394fe043fdf service nova] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Received unexpected event network-vif-plugged-fae6b6fe-00ac-409a-be5f-719500f98702 for instance with vm_state building and task_state spawning. [ 639.265443] env[62522]: DEBUG nova.compute.manager [req-a0d30c29-5309-4e10-b6ff-301f66d0146b req-581da1b0-216d-4fdf-9d90-0394fe043fdf service nova] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Received event network-changed-fae6b6fe-00ac-409a-be5f-719500f98702 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 639.265615] env[62522]: DEBUG nova.compute.manager [req-a0d30c29-5309-4e10-b6ff-301f66d0146b req-581da1b0-216d-4fdf-9d90-0394fe043fdf service nova] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Refreshing instance network info cache due to event network-changed-fae6b6fe-00ac-409a-be5f-719500f98702. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 639.265820] env[62522]: DEBUG oslo_concurrency.lockutils [req-a0d30c29-5309-4e10-b6ff-301f66d0146b req-581da1b0-216d-4fdf-9d90-0394fe043fdf service nova] Acquiring lock "refresh_cache-758ed671-347a-4949-9842-2f8cdcd261ae" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 639.265954] env[62522]: DEBUG oslo_concurrency.lockutils [req-a0d30c29-5309-4e10-b6ff-301f66d0146b req-581da1b0-216d-4fdf-9d90-0394fe043fdf service nova] Acquired lock "refresh_cache-758ed671-347a-4949-9842-2f8cdcd261ae" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.266143] env[62522]: DEBUG nova.network.neutron [req-a0d30c29-5309-4e10-b6ff-301f66d0146b req-581da1b0-216d-4fdf-9d90-0394fe043fdf service nova] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Refreshing network info cache for port fae6b6fe-00ac-409a-be5f-719500f98702 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 639.420369] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2414965, 'name': CreateVM_Task} progress is 25%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.441512] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "6d8b5429-113b-4280-9851-bf6614dde4a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.441775] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "6d8b5429-113b-4280-9851-bf6614dde4a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.489888] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "68b4c229-0ace-486f-9a99-d3c955b7bdfb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.489888] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "68b4c229-0ace-486f-9a99-d3c955b7bdfb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.579637] env[62522]: DEBUG nova.network.neutron [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 639.588730] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2414962, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.603214] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': task-2414966, 'name': ReconfigVM_Task, 'duration_secs': 0.379218} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.603214] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 7828f9c8-fc02-4218-ba93-5362af807dad/7828f9c8-fc02-4218-ba93-5362af807dad.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 639.605122] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-21d5bc59-4c6d-44df-a5af-b4bc2891db5f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.614994] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Waiting for the task: (returnval){ [ 639.614994] env[62522]: value = "task-2414967" [ 639.614994] env[62522]: _type = "Task" [ 639.614994] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.631116] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d061bc-dc7c-89f6-a6c9-17cb455cd1a8, 'name': SearchDatastore_Task, 'duration_secs': 0.012311} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.638224] env[62522]: DEBUG oslo_concurrency.lockutils [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 639.638991] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 758ed671-347a-4949-9842-2f8cdcd261ae/758ed671-347a-4949-9842-2f8cdcd261ae.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 639.639204] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': task-2414967, 'name': Rename_Task} progress is 10%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.639464] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef3a087c-4e9e-4c5b-9ce2-edb582b9d939 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.646525] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Waiting for the task: (returnval){ [ 639.646525] env[62522]: value = "task-2414968" [ 639.646525] env[62522]: _type = "Task" [ 639.646525] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.664023] env[62522]: DEBUG nova.compute.manager [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 639.664023] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 639.664023] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Task: {'id': task-2414968, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.664023] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd382c6-8d16-45f2-9a1f-ff88134df4f6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.671507] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 639.671507] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f367dd3-8a72-4ecd-9e89-740a1d7324c3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.678427] env[62522]: DEBUG oslo_vmware.api [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] Waiting for the task: (returnval){ [ 639.678427] env[62522]: value = "task-2414969" [ 639.678427] env[62522]: _type = "Task" [ 639.678427] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.684702] env[62522]: DEBUG oslo_vmware.api [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] Task: {'id': task-2414969, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.794165] env[62522]: DEBUG nova.network.neutron [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 639.919256] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2414965, 'name': CreateVM_Task, 'duration_secs': 0.772768} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.919425] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 639.920224] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 639.920392] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.920758] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 639.920968] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2e406ec-23a4-4773-ae76-f4e4fad3db21 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.925458] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Waiting for the task: (returnval){ [ 639.925458] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d4d560-af63-7b58-916e-94855f885ea3" [ 639.925458] env[62522]: _type = "Task" [ 639.925458] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.933068] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d4d560-af63-7b58-916e-94855f885ea3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.086299] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2414962, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.099351} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.086671] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 74b6ae10-a595-4139-8eda-38fe1aa298cf/74b6ae10-a595-4139-8eda-38fe1aa298cf.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 640.086933] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 640.087266] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5d1e5af4-3ac4-4a87-9f18-ee6e3296dfe5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.099281] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 640.099281] env[62522]: value = "task-2414970" [ 640.099281] env[62522]: _type = "Task" [ 640.099281] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.109114] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2414970, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.127270] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': task-2414967, 'name': Rename_Task, 'duration_secs': 0.158473} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.127594] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 640.129726] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-08f21f28-4c92-49cc-9411-11efa3ea4813 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.142321] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Waiting for the task: (returnval){ [ 640.142321] env[62522]: value = "task-2414971" [ 640.142321] env[62522]: _type = "Task" [ 640.142321] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.176635] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Task: {'id': task-2414968, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.177321] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': task-2414971, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.182279] env[62522]: DEBUG nova.network.neutron [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Updating instance_info_cache with network_info: [{"id": "20713b6e-4b87-4065-a83b-f62812551cd5", "address": "fa:16:3e:a5:e2:12", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.43", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20713b6e-4b", "ovs_interfaceid": "20713b6e-4b87-4065-a83b-f62812551cd5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.192297] env[62522]: DEBUG oslo_vmware.api [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] Task: {'id': task-2414969, 'name': PowerOffVM_Task, 'duration_secs': 0.264814} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.192297] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 640.192556] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 640.192842] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b5b647a0-d4f2-45ec-a102-7ca2587ff2b1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.264371] env[62522]: DEBUG nova.scheduler.client.report [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Refreshing inventories for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 640.269281] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 640.269681] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 640.269681] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] Deleting the datastore file [datastore2] a3830103-2dcb-40ac-8e62-b331fe4673ff {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 640.270191] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-325527e4-cff5-43bb-89e7-cc7226ab2e30 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.281969] env[62522]: DEBUG oslo_vmware.api [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] Waiting for the task: (returnval){ [ 640.281969] env[62522]: value = "task-2414973" [ 640.281969] env[62522]: _type = "Task" [ 640.281969] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.293104] env[62522]: DEBUG oslo_vmware.api [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] Task: {'id': task-2414973, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.296025] env[62522]: DEBUG nova.scheduler.client.report [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Updating ProviderTree inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 640.296025] env[62522]: DEBUG nova.compute.provider_tree [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 640.308860] env[62522]: DEBUG nova.scheduler.client.report [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Refreshing aggregate associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, aggregates: None {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 640.312301] env[62522]: DEBUG nova.network.neutron [req-87715d07-a8df-4c06-be4e-ab8739e9a93d req-7a0a18e9-5b1a-41fb-8d58-940e062c667c service nova] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Updated VIF entry in instance network info cache for port a0e9b152-7b65-405a-8302-dc8561d06224. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 640.312626] env[62522]: DEBUG nova.network.neutron [req-87715d07-a8df-4c06-be4e-ab8739e9a93d req-7a0a18e9-5b1a-41fb-8d58-940e062c667c service nova] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Updating instance_info_cache with network_info: [{"id": "a0e9b152-7b65-405a-8302-dc8561d06224", "address": "fa:16:3e:e0:65:aa", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0e9b152-7b", "ovs_interfaceid": "a0e9b152-7b65-405a-8302-dc8561d06224", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.337027] env[62522]: DEBUG nova.scheduler.client.report [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Refreshing trait associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 640.426901] env[62522]: DEBUG nova.network.neutron [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.445020] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d4d560-af63-7b58-916e-94855f885ea3, 'name': SearchDatastore_Task, 'duration_secs': 0.015215} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.448437] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 640.449071] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 640.449071] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 640.449071] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.449273] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 640.450771] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-405f53fd-32f7-4768-97e0-0c4f6a8513a5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.459806] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 640.459981] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 640.460733] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9aa1732f-2fca-427c-80b2-a300d1ae4c7b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.477030] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Waiting for the task: (returnval){ [ 640.477030] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522f3df3-ed2c-3383-20c1-7b6b04f0e889" [ 640.477030] env[62522]: _type = "Task" [ 640.477030] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.489619] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522f3df3-ed2c-3383-20c1-7b6b04f0e889, 'name': SearchDatastore_Task, 'duration_secs': 0.008676} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.490694] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ae0294c-62fd-476b-903f-d6e9bc53ff65 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.495538] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Waiting for the task: (returnval){ [ 640.495538] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52916b65-7ff8-f70f-b678-b4270b1d163a" [ 640.495538] env[62522]: _type = "Task" [ 640.495538] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.503178] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52916b65-7ff8-f70f-b678-b4270b1d163a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.611722] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2414970, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.184031} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.612039] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 640.612869] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8965d4-9899-4b45-89a8-01adb392e4a8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.636316] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] 74b6ae10-a595-4139-8eda-38fe1aa298cf/74b6ae10-a595-4139-8eda-38fe1aa298cf.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 640.640081] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c37e902-37c0-4035-b99f-b2ef1f19b5e1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.668386] env[62522]: DEBUG oslo_vmware.api [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': task-2414971, 'name': PowerOnVM_Task, 'duration_secs': 0.503151} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.671398] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 640.672823] env[62522]: INFO nova.compute.manager [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Took 13.74 seconds to spawn the instance on the hypervisor. [ 640.672823] env[62522]: DEBUG nova.compute.manager [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 640.672823] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 640.672823] env[62522]: value = "task-2414974" [ 640.672823] env[62522]: _type = "Task" [ 640.672823] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.677937] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8283b5b-05ac-4e14-b459-b281395d85ae {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.687158] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Task: {'id': task-2414968, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.64205} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.687840] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Releasing lock "refresh_cache-678b6b5f-b410-4c55-872e-4a74da6d7ebc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 640.688188] env[62522]: DEBUG nova.compute.manager [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Instance network_info: |[{"id": "20713b6e-4b87-4065-a83b-f62812551cd5", "address": "fa:16:3e:a5:e2:12", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.43", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20713b6e-4b", "ovs_interfaceid": "20713b6e-4b87-4065-a83b-f62812551cd5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 640.688773] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 758ed671-347a-4949-9842-2f8cdcd261ae/758ed671-347a-4949-9842-2f8cdcd261ae.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 640.689523] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 640.691021] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:e2:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f6d1427-d86b-4371-9172-50e4bb0eb1cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '20713b6e-4b87-4065-a83b-f62812551cd5', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 640.698912] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Creating folder: Project (209afab6e4c840fd8ae644e8d4d90fbe). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 640.699877] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2efd65d8-d834-418f-9a25-09e80927c973 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.711128] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d96a10e-f77f-45d0-9488-174f996f50c2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.712256] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2414974, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.719990] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Waiting for the task: (returnval){ [ 640.719990] env[62522]: value = "task-2414975" [ 640.719990] env[62522]: _type = "Task" [ 640.719990] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.724368] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Created folder: Project (209afab6e4c840fd8ae644e8d4d90fbe) in parent group-v489562. [ 640.724882] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Creating folder: Instances. Parent ref: group-v489581. 
{{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 640.725806] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-608977c9-896f-4650-bf5a-314e96180c5e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.732244] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Task: {'id': task-2414975, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.737084] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Created folder: Instances in parent group-v489581. [ 640.739201] env[62522]: DEBUG oslo.service.loopingcall [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 640.739201] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 640.739201] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5021a713-692d-411e-bb44-f95d33dc78f9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.754468] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c33fa61-8927-4354-a633-b52a4080a7d3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.763707] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62911302-b1b7-4134-851f-8252390d0254 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.766782] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 640.766782] env[62522]: value = "task-2414978" [ 640.766782] env[62522]: _type = "Task" [ 640.766782] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.800238] env[62522]: DEBUG nova.network.neutron [req-a0d30c29-5309-4e10-b6ff-301f66d0146b req-581da1b0-216d-4fdf-9d90-0394fe043fdf service nova] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Updated VIF entry in instance network info cache for port fae6b6fe-00ac-409a-be5f-719500f98702. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 640.800667] env[62522]: DEBUG nova.network.neutron [req-a0d30c29-5309-4e10-b6ff-301f66d0146b req-581da1b0-216d-4fdf-9d90-0394fe043fdf service nova] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Updating instance_info_cache with network_info: [{"id": "fae6b6fe-00ac-409a-be5f-719500f98702", "address": "fa:16:3e:3d:b2:9f", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.51", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfae6b6fe-00", "ovs_interfaceid": "fae6b6fe-00ac-409a-be5f-719500f98702", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.806684] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43766709-475b-4985-8522-128b5e24707e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.814384] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2414978, 'name': CreateVM_Task} progress is 25%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.815781] env[62522]: DEBUG oslo_concurrency.lockutils [req-87715d07-a8df-4c06-be4e-ab8739e9a93d req-7a0a18e9-5b1a-41fb-8d58-940e062c667c service nova] Releasing lock "refresh_cache-3824a70e-8498-410a-904d-c7cd0de0c358" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 640.819140] env[62522]: DEBUG oslo_vmware.api [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] Task: {'id': task-2414973, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179733} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.821669] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 640.821879] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 640.822043] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 640.822220] env[62522]: INFO nova.compute.manager [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Took 1.16 seconds to destroy the instance on the hypervisor. [ 640.822457] env[62522]: DEBUG oslo.service.loopingcall [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 640.822931] env[62522]: DEBUG nova.compute.manager [-] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 640.823046] env[62522]: DEBUG nova.network.neutron [-] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 640.825674] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c370c8a4-3a2b-4790-a996-df578a4ca061 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.846042] env[62522]: DEBUG nova.compute.provider_tree [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 640.932041] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Releasing lock "refresh_cache-433387e7-8de9-4cfb-9012-8652c65b5b97" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 640.934654] env[62522]: DEBUG nova.compute.manager [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62522) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 640.934654] env[62522]: DEBUG nova.compute.manager [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 640.934654] env[62522]: DEBUG nova.network.neutron [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 640.967399] env[62522]: DEBUG nova.network.neutron [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 641.006753] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52916b65-7ff8-f70f-b678-b4270b1d163a, 'name': SearchDatastore_Task, 'duration_secs': 0.008311} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.007012] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 641.007278] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 3824a70e-8498-410a-904d-c7cd0de0c358/3824a70e-8498-410a-904d-c7cd0de0c358.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 641.007562] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9b7f351f-aa31-4655-bff0-bffff02d78ba {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.013780] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Waiting for the task: (returnval){ [ 641.013780] env[62522]: value = "task-2414979" [ 641.013780] env[62522]: _type = "Task" [ 641.013780] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.021868] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2414979, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.191457] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2414974, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.227755] env[62522]: INFO nova.compute.manager [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Took 19.78 seconds to build instance. 
[ 641.237906] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Task: {'id': task-2414975, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.282927] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2414978, 'name': CreateVM_Task} progress is 25%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.315577] env[62522]: DEBUG oslo_concurrency.lockutils [req-a0d30c29-5309-4e10-b6ff-301f66d0146b req-581da1b0-216d-4fdf-9d90-0394fe043fdf service nova] Releasing lock "refresh_cache-758ed671-347a-4949-9842-2f8cdcd261ae" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 641.377167] env[62522]: ERROR nova.scheduler.client.report [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [req-a0df2aa0-ee05-4f80-9540-ff71a088adb2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c7fa38b2-245d-4337-a012-22c1a01c0a72. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a0df2aa0-ee05-4f80-9540-ff71a088adb2"}]} [ 641.398349] env[62522]: DEBUG nova.scheduler.client.report [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Refreshing inventories for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 641.414483] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "879354d3-7423-41e2-93f6-0d8d3a120170" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.414726] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "879354d3-7423-41e2-93f6-0d8d3a120170" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.423462] env[62522]: DEBUG nova.scheduler.client.report [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Updating ProviderTree inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 from 
_refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 641.423686] env[62522]: DEBUG nova.compute.provider_tree [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 641.444118] env[62522]: DEBUG nova.scheduler.client.report [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Refreshing aggregate associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, aggregates: None {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 641.476466] env[62522]: DEBUG nova.network.neutron [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.489495] env[62522]: DEBUG nova.scheduler.client.report [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Refreshing trait associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 641.531159] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2414979, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.696167] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2414974, 'name': ReconfigVM_Task, 'duration_secs': 0.809115} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.701484] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Reconfigured VM instance instance-00000004 to attach disk [datastore1] 74b6ae10-a595-4139-8eda-38fe1aa298cf/74b6ae10-a595-4139-8eda-38fe1aa298cf.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 641.702398] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-add8082b-519a-4939-a251-8427c93d37f0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.709684] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 641.709684] env[62522]: value = "task-2414980" [ 641.709684] env[62522]: _type = "Task" [ 641.709684] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.725670] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2414980, 'name': Rename_Task} progress is 10%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.733846] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8fb9762f-30e7-4b85-98c5-178b779da64c tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Lock "7828f9c8-fc02-4218-ba93-5362af807dad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.300s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 641.734124] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Task: {'id': task-2414975, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.754375} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.737815] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 641.739064] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0c96a2-41f4-49f7-9c59-4552b79453db {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.762036] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] 758ed671-347a-4949-9842-2f8cdcd261ae/758ed671-347a-4949-9842-2f8cdcd261ae.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 641.765457] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-769edf52-9554-4ba0-a3d2-2352631756e4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.795327] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2414978, 'name': CreateVM_Task} progress is 25%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.795327] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Waiting for the task: (returnval){ [ 641.795327] env[62522]: value = "task-2414981" [ 641.795327] env[62522]: _type = "Task" [ 641.795327] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.803832] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Task: {'id': task-2414981, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.946098] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc520ea-f5f5-4717-884e-58b032c9836e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.957312] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbddb65f-423b-42d3-885e-530bb1dcd182 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.998171] env[62522]: INFO nova.compute.manager [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 433387e7-8de9-4cfb-9012-8652c65b5b97] Took 1.07 seconds to deallocate network for instance. 
[ 642.002776] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f4727bf-4539-427b-8cf0-43e6ba6e2609 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.014878] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-741c8c1b-3012-44fb-8a9f-9b859fbdbc77 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.033195] env[62522]: DEBUG nova.compute.provider_tree [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 642.036933] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2414979, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.58142} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.037466] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 3824a70e-8498-410a-904d-c7cd0de0c358/3824a70e-8498-410a-904d-c7cd0de0c358.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 642.037675] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 642.038157] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7f538235-72a1-431f-93b4-b2c521e6a8aa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.045153] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Waiting for the task: (returnval){ [ 642.045153] env[62522]: value = "task-2414982" [ 642.045153] env[62522]: _type = "Task" [ 642.045153] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.054778] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2414982, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.060957] env[62522]: DEBUG nova.network.neutron [-] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.221610] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2414980, 'name': Rename_Task, 'duration_secs': 0.25748} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.221856] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 642.222145] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-981c34a0-648b-4bce-a1e0-a2edb6799566 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.230104] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 642.230104] env[62522]: value = "task-2414983" [ 642.230104] env[62522]: _type = "Task" [ 642.230104] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.239038] env[62522]: DEBUG nova.compute.manager [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 642.243549] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2414983, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.292664] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2414978, 'name': CreateVM_Task, 'duration_secs': 1.335291} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.292918] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 642.293713] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.293881] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.294266] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 642.297742] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e88d2a88-b0d3-4475-b260-e3d556cfb708 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.307791] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Waiting for the task: (returnval){ [ 642.307791] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b377ea-fdfd-059a-924a-bd95ce3597bf" [ 642.307791] env[62522]: _type = "Task" [ 642.307791] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.308469] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Task: {'id': task-2414981, 'name': ReconfigVM_Task, 'duration_secs': 0.382264} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.312839] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Reconfigured VM instance instance-00000005 to attach disk [datastore1] 758ed671-347a-4949-9842-2f8cdcd261ae/758ed671-347a-4949-9842-2f8cdcd261ae.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 642.320031] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7646736d-4799-4947-bcab-33ae0e355816 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.328990] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b377ea-fdfd-059a-924a-bd95ce3597bf, 'name': SearchDatastore_Task, 'duration_secs': 0.015698} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.333395] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 642.334013] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 642.334013] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.334013] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.334249] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 642.334524] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 
tempest-ServerDiagnosticsTest-1704629071-project-member] Waiting for the task: (returnval){ [ 642.334524] env[62522]: value = "task-2414984" [ 642.334524] env[62522]: _type = "Task" [ 642.334524] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.335080] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-09a8aeab-8dde-4e10-9041-e6bb2b80f9d5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.344182] env[62522]: DEBUG nova.compute.manager [req-fefcd463-cb61-48b2-af5c-ff54bdd955fd req-c5851d0f-a1d9-4d32-88ab-8f84b61fbee0 service nova] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Received event network-vif-plugged-20713b6e-4b87-4065-a83b-f62812551cd5 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 642.344298] env[62522]: DEBUG oslo_concurrency.lockutils [req-fefcd463-cb61-48b2-af5c-ff54bdd955fd req-c5851d0f-a1d9-4d32-88ab-8f84b61fbee0 service nova] Acquiring lock "678b6b5f-b410-4c55-872e-4a74da6d7ebc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.344447] env[62522]: DEBUG oslo_concurrency.lockutils [req-fefcd463-cb61-48b2-af5c-ff54bdd955fd req-c5851d0f-a1d9-4d32-88ab-8f84b61fbee0 service nova] Lock "678b6b5f-b410-4c55-872e-4a74da6d7ebc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.344707] env[62522]: DEBUG oslo_concurrency.lockutils [req-fefcd463-cb61-48b2-af5c-ff54bdd955fd req-c5851d0f-a1d9-4d32-88ab-8f84b61fbee0 service nova] Lock "678b6b5f-b410-4c55-872e-4a74da6d7ebc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.344823] env[62522]: DEBUG nova.compute.manager [req-fefcd463-cb61-48b2-af5c-ff54bdd955fd req-c5851d0f-a1d9-4d32-88ab-8f84b61fbee0 service nova] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] No waiting events found dispatching network-vif-plugged-20713b6e-4b87-4065-a83b-f62812551cd5 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 642.344889] env[62522]: WARNING nova.compute.manager [req-fefcd463-cb61-48b2-af5c-ff54bdd955fd req-c5851d0f-a1d9-4d32-88ab-8f84b61fbee0 service nova] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Received unexpected event network-vif-plugged-20713b6e-4b87-4065-a83b-f62812551cd5 for instance with vm_state building and task_state spawning. [ 642.345085] env[62522]: DEBUG nova.compute.manager [req-fefcd463-cb61-48b2-af5c-ff54bdd955fd req-c5851d0f-a1d9-4d32-88ab-8f84b61fbee0 service nova] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Received event network-changed-20713b6e-4b87-4065-a83b-f62812551cd5 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 642.345245] env[62522]: DEBUG nova.compute.manager [req-fefcd463-cb61-48b2-af5c-ff54bdd955fd req-c5851d0f-a1d9-4d32-88ab-8f84b61fbee0 service nova] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Refreshing instance network info cache due to event network-changed-20713b6e-4b87-4065-a83b-f62812551cd5. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 642.345430] env[62522]: DEBUG oslo_concurrency.lockutils [req-fefcd463-cb61-48b2-af5c-ff54bdd955fd req-c5851d0f-a1d9-4d32-88ab-8f84b61fbee0 service nova] Acquiring lock "refresh_cache-678b6b5f-b410-4c55-872e-4a74da6d7ebc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.345603] env[62522]: DEBUG oslo_concurrency.lockutils [req-fefcd463-cb61-48b2-af5c-ff54bdd955fd req-c5851d0f-a1d9-4d32-88ab-8f84b61fbee0 service nova] Acquired lock "refresh_cache-678b6b5f-b410-4c55-872e-4a74da6d7ebc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.345767] env[62522]: DEBUG nova.network.neutron [req-fefcd463-cb61-48b2-af5c-ff54bdd955fd req-c5851d0f-a1d9-4d32-88ab-8f84b61fbee0 service nova] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Refreshing network info cache for port 20713b6e-4b87-4065-a83b-f62812551cd5 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 642.354844] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Task: {'id': task-2414984, 'name': Rename_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.357793] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 642.358729] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 642.359995] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b84226b3-8373-4c63-877f-52d32947a320 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.367235] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Waiting for the task: (returnval){ [ 642.367235] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d4185d-080d-8f35-3f75-4f56d930db6c" [ 642.367235] env[62522]: _type = "Task" [ 642.367235] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.376307] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d4185d-080d-8f35-3f75-4f56d930db6c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.557116] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2414982, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080541} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.557116] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 642.557116] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed66e3c4-cec1-42fa-b1cb-51ba5511c9b5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.571085] env[62522]: INFO nova.compute.manager [-] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Took 1.75 seconds to deallocate network for instance. [ 642.579986] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] 3824a70e-8498-410a-904d-c7cd0de0c358/3824a70e-8498-410a-904d-c7cd0de0c358.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 642.581737] env[62522]: DEBUG nova.scheduler.client.report [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Updated inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with generation 24 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 642.581971] env[62522]: DEBUG nova.compute.provider_tree [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Updating resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 generation from 24 to 25 during operation: update_inventory {{(pid=62522) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 642.582170] env[62522]: DEBUG nova.compute.provider_tree [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 642.586449] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54da35ff-71cd-4f85-849e-26f3fc06a3fc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.603043] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.386s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.603613] env[62522]: DEBUG nova.compute.manager [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 642.609100] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.452s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.610523] env[62522]: INFO nova.compute.claims [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 642.618985] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Waiting for the task: (returnval){ [ 642.618985] env[62522]: value = "task-2414985" [ 642.618985] env[62522]: _type = "Task" [ 642.618985] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.628285] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2414985, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.741228] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2414983, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.767964] env[62522]: DEBUG oslo_concurrency.lockutils [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.848710] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Task: {'id': task-2414984, 'name': Rename_Task, 'duration_secs': 0.193866} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.851690] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 642.851690] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a7a7844e-184a-4440-a6ef-483226b2a574 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.858052] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Waiting for the task: (returnval){ [ 642.858052] env[62522]: value = "task-2414986" [ 642.858052] env[62522]: _type = "Task" [ 642.858052] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.870984] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Task: {'id': task-2414986, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.882305] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d4185d-080d-8f35-3f75-4f56d930db6c, 'name': SearchDatastore_Task, 'duration_secs': 0.00793} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.882442] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9ad518e-692c-4943-a85f-77070b592e12 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.887922] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Waiting for the task: (returnval){ [ 642.887922] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bdb9ae-65d1-c072-719b-2c1ac159552e" [ 642.887922] env[62522]: _type = "Task" [ 642.887922] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.896452] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bdb9ae-65d1-c072-719b-2c1ac159552e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.049177] env[62522]: INFO nova.scheduler.client.report [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Deleted allocations for instance 433387e7-8de9-4cfb-9012-8652c65b5b97 [ 643.114983] env[62522]: DEBUG nova.compute.utils [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 643.120794] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 643.120794] env[62522]: DEBUG nova.compute.manager [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 643.120794] env[62522]: DEBUG nova.network.neutron [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 643.135683] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2414985, 'name': ReconfigVM_Task, 'duration_secs': 0.392276} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.135944] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Reconfigured VM instance instance-00000006 to attach disk [datastore1] 3824a70e-8498-410a-904d-c7cd0de0c358/3824a70e-8498-410a-904d-c7cd0de0c358.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 643.136783] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4dcdc7ae-4f3a-4512-8564-e991307163fd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.150386] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Waiting for the task: (returnval){ [ 643.150386] env[62522]: value = "task-2414987" [ 643.150386] env[62522]: _type = "Task" [ 643.150386] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.162446] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2414987, 'name': Rename_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.250673] env[62522]: DEBUG oslo_vmware.api [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2414983, 'name': PowerOnVM_Task, 'duration_secs': 1.008533} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.250957] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 643.250957] env[62522]: INFO nova.compute.manager [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Took 13.99 seconds to spawn the instance on the hypervisor. 
[ 643.251295] env[62522]: DEBUG nova.compute.manager [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 643.252589] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a90aecd-fbcc-4b3a-b1fe-ab7ecd79868a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.370952] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Task: {'id': task-2414986, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.377680] env[62522]: DEBUG nova.policy [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c99439422d6d48b9a6229c0956c10c71', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cbd92adc76814720ac43cd9c99d21209', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 643.404136] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bdb9ae-65d1-c072-719b-2c1ac159552e, 'name': SearchDatastore_Task, 'duration_secs': 0.012903} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.404533] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.405343] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 678b6b5f-b410-4c55-872e-4a74da6d7ebc/678b6b5f-b410-4c55-872e-4a74da6d7ebc.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 643.405343] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-406ac71a-ca86-4108-8b83-af765a48a5a9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.417048] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Waiting for the task: (returnval){ [ 643.417048] env[62522]: value = "task-2414988" [ 643.417048] env[62522]: _type = "Task" [ 643.417048] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.425630] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Task: {'id': task-2414988, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.471171] env[62522]: DEBUG oslo_concurrency.lockutils [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Acquiring lock "7828f9c8-fc02-4218-ba93-5362af807dad" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 643.471465] env[62522]: DEBUG oslo_concurrency.lockutils [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Lock "7828f9c8-fc02-4218-ba93-5362af807dad" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 643.471704] env[62522]: DEBUG oslo_concurrency.lockutils [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Acquiring lock "7828f9c8-fc02-4218-ba93-5362af807dad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 643.471909] env[62522]: DEBUG oslo_concurrency.lockutils [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Lock "7828f9c8-fc02-4218-ba93-5362af807dad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 643.472120] env[62522]: DEBUG oslo_concurrency.lockutils [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Lock "7828f9c8-fc02-4218-ba93-5362af807dad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 643.475087] env[62522]: INFO nova.compute.manager [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Terminating instance [ 643.558010] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a058760-af28-48f1-b6fe-8f276ae528fe tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Lock "433387e7-8de9-4cfb-9012-8652c65b5b97" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 21.073s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 643.623333] env[62522]: DEBUG nova.network.neutron [req-fefcd463-cb61-48b2-af5c-ff54bdd955fd req-c5851d0f-a1d9-4d32-88ab-8f84b61fbee0 service nova] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Updated VIF entry in instance network info cache for port 20713b6e-4b87-4065-a83b-f62812551cd5. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 643.623623] env[62522]: DEBUG nova.network.neutron [req-fefcd463-cb61-48b2-af5c-ff54bdd955fd req-c5851d0f-a1d9-4d32-88ab-8f84b61fbee0 service nova] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Updating instance_info_cache with network_info: [{"id": "20713b6e-4b87-4065-a83b-f62812551cd5", "address": "fa:16:3e:a5:e2:12", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.43", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20713b6e-4b", "ovs_interfaceid": "20713b6e-4b87-4065-a83b-f62812551cd5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.625319] env[62522]: DEBUG nova.compute.manager [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 643.664038] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2414987, 'name': Rename_Task, 'duration_secs': 0.169612} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.664334] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 643.665125] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ac18b79-df00-4880-b07e-d44ffe0a94b6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.680063] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Waiting for the task: (returnval){ [ 643.680063] env[62522]: value = "task-2414989" [ 643.680063] env[62522]: _type = "Task" [ 643.680063] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.691255] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2414989, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.778133] env[62522]: INFO nova.compute.manager [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Took 21.93 seconds to build instance. [ 643.872900] env[62522]: DEBUG oslo_vmware.api [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Task: {'id': task-2414986, 'name': PowerOnVM_Task, 'duration_secs': 0.539355} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.875741] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 643.876364] env[62522]: INFO nova.compute.manager [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Took 12.27 seconds to spawn the instance on the hypervisor. [ 643.876429] env[62522]: DEBUG nova.compute.manager [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 643.877475] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67eb7175-4fae-44be-8928-bb3592411004 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.934263] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Task: {'id': task-2414988, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.982398] env[62522]: DEBUG nova.compute.manager [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 643.982605] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 643.983647] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b40b3ef-6ae2-4c90-97e2-77d3c599b4ca {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.991651] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 643.992763] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ecfe5215-159a-4c40-a387-462475e27f40 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.001993] env[62522]: DEBUG oslo_vmware.api [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Waiting for the task: (returnval){ [ 644.001993] env[62522]: value = "task-2414990" [ 644.001993] env[62522]: _type = "Task" [ 644.001993] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.010834] env[62522]: DEBUG oslo_vmware.api [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': task-2414990, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.012496] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e7808a7-970e-42dd-bbcd-99c8561331e2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.020587] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69a8f9a3-e4fc-47ba-9d9b-0111223635ee {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.055613] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf8caad-06a7-422e-95f9-1a9436d10bb1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.061221] env[62522]: DEBUG nova.compute.manager [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 644.072027] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8254e6d5-b7df-427f-874f-507c4254e599 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.087892] env[62522]: DEBUG nova.compute.provider_tree [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 644.134096] env[62522]: DEBUG oslo_concurrency.lockutils [req-fefcd463-cb61-48b2-af5c-ff54bdd955fd req-c5851d0f-a1d9-4d32-88ab-8f84b61fbee0 service nova] Releasing lock "refresh_cache-678b6b5f-b410-4c55-872e-4a74da6d7ebc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.194035] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2414989, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.281038] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7f398e63-4ffe-42ca-97fd-a149233861be tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "74b6ae10-a595-4139-8eda-38fe1aa298cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 23.446s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.408824] env[62522]: INFO nova.compute.manager [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Took 22.39 seconds to build instance. [ 644.426795] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Task: {'id': task-2414988, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.515046] env[62522]: DEBUG oslo_vmware.api [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': task-2414990, 'name': PowerOffVM_Task, 'duration_secs': 0.339287} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.515849] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 644.516042] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 644.516423] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b48daa2-38fc-4973-9ced-46394a76fef3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.583935] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 644.584165] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 644.584336] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Deleting the datastore file [datastore1] 7828f9c8-fc02-4218-ba93-5362af807dad {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 644.584609] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8579d365-da03-4ddd-909a-ad5b92057021 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.591435] env[62522]: DEBUG nova.scheduler.client.report [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 644.596254] env[62522]: DEBUG oslo_vmware.api [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Waiting for the task: (returnval){ [ 644.596254] env[62522]: value = "task-2414992" [ 
644.596254] env[62522]: _type = "Task" [ 644.596254] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.605828] env[62522]: DEBUG oslo_vmware.api [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: {'id': task-2414992, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.608131] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.639226] env[62522]: DEBUG nova.compute.manager [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 644.671770] env[62522]: DEBUG nova.virt.hardware [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 644.671863] env[62522]: DEBUG nova.virt.hardware [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 644.672020] env[62522]: DEBUG nova.virt.hardware [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 644.672522] env[62522]: DEBUG nova.virt.hardware [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 644.672677] env[62522]: DEBUG nova.virt.hardware [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 
tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 644.673022] env[62522]: DEBUG nova.virt.hardware [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 644.673098] env[62522]: DEBUG nova.virt.hardware [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 644.673201] env[62522]: DEBUG nova.virt.hardware [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 644.674068] env[62522]: DEBUG nova.virt.hardware [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 644.674068] env[62522]: DEBUG nova.virt.hardware [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 644.674068] env[62522]: DEBUG nova.virt.hardware [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 644.674624] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce87973c-5d0e-4c7a-b393-23425a2725ce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.689987] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24158942-3d4c-44fd-8000-8c94b29219b3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.700946] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2414989, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.784688] env[62522]: DEBUG nova.compute.manager [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 644.908055] env[62522]: DEBUG oslo_concurrency.lockutils [None req-af1426dd-9f1c-4637-9315-99eca5c2c283 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Lock "758ed671-347a-4949-9842-2f8cdcd261ae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 23.925s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.929128] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Task: {'id': task-2414988, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.400127} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.929128] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 678b6b5f-b410-4c55-872e-4a74da6d7ebc/678b6b5f-b410-4c55-872e-4a74da6d7ebc.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 644.929128] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 644.929128] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-30fcc731-b18e-4356-8a2c-fc2362913e30 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.937354] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Waiting for the task: (returnval){ [ 644.937354] env[62522]: value = "task-2414993" [ 644.937354] env[62522]: _type = "Task" [ 644.937354] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.945759] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Task: {'id': task-2414993, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.007027] env[62522]: DEBUG nova.network.neutron [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Successfully created port: 13658e84-5e72-4437-ab9f-9ca4363e4eff {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 645.104493] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.495s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 645.105195] env[62522]: DEBUG nova.compute.manager [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 645.109062] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 18.842s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.113019] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 645.113019] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62522) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 645.113019] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.750s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.113019] env[62522]: INFO nova.compute.claims [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 645.116544] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8831ff0-13b9-4795-ac08-7bf62244402a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.124378] env[62522]: DEBUG oslo_vmware.api [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Task: 
{'id': task-2414992, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.347388} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.126872] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 645.126872] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 645.126872] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 645.126872] env[62522]: INFO nova.compute.manager [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Took 1.14 seconds to destroy the instance on the hypervisor. [ 645.126872] env[62522]: DEBUG oslo.service.loopingcall [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 645.127068] env[62522]: DEBUG nova.compute.manager [-] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 645.127068] env[62522]: DEBUG nova.network.neutron [-] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 645.133738] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d205651-9d44-42be-b7a2-07fc5c6c89d5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.148498] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e18cabb8-4f38-46a9-a8d8-1210dffaca47 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.156784] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-920c833d-ffe9-436b-ba9f-55e834dca640 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.195587] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181201MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=62522) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 645.195765] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.206394] env[62522]: DEBUG oslo_vmware.api [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2414989, 'name': PowerOnVM_Task, 'duration_secs': 1.38118} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.206757] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 645.207033] env[62522]: INFO nova.compute.manager [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Took 11.24 seconds to spawn the instance on the hypervisor. 
[ 645.207278] env[62522]: DEBUG nova.compute.manager [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 645.208157] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c85ebc1-630a-4b2c-b634-07a9fd40bd5b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.324403] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.412150] env[62522]: DEBUG nova.compute.manager [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 645.455390] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Task: {'id': task-2414993, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.189693} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.455676] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 645.456611] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3affeca6-96d7-4384-a2bb-8824807d1866 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.489764] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] 678b6b5f-b410-4c55-872e-4a74da6d7ebc/678b6b5f-b410-4c55-872e-4a74da6d7ebc.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 645.491590] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6642e25-39ca-40e3-89b9-393ff26591eb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.512023] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Waiting for the task: (returnval){ [ 645.512023] env[62522]: value = "task-2414994" [ 645.512023] env[62522]: 
_type = "Task" [ 645.512023] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.520809] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Task: {'id': task-2414994, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.619553] env[62522]: DEBUG nova.compute.utils [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 645.624439] env[62522]: DEBUG nova.compute.manager [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Not allocating networking since 'none' was specified. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 645.733375] env[62522]: INFO nova.compute.manager [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Took 23.13 seconds to build instance. [ 645.956378] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.032481] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Task: {'id': task-2414994, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.129111] env[62522]: DEBUG nova.compute.manager [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 646.238283] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5f4573c9-f822-42f1-92e9-ec45508d3020 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Lock "3824a70e-8498-410a-904d-c7cd0de0c358" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.638s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 646.326795] env[62522]: DEBUG nova.compute.manager [req-81a33f4f-82b4-4d7e-8437-018c4051beab req-e4c85d63-42a5-4938-ad25-e13229df75da service nova] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Received event network-vif-deleted-36173a2e-7d22-4ac6-aa18-ef15b74e3de1 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 646.508292] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58bddabe-418f-4ad5-a43b-7216111fac8b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.521416] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ad2d4a-f6f1-46f8-b718-c7e45b96a665 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.533558] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Task: {'id': task-2414994, 'name': ReconfigVM_Task, 'duration_secs': 0.582459} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.566971] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Reconfigured VM instance instance-00000007 to attach disk [datastore1] 678b6b5f-b410-4c55-872e-4a74da6d7ebc/678b6b5f-b410-4c55-872e-4a74da6d7ebc.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 646.568618] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-77a4afe1-c8a1-4952-b5f5-8bf183030164 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.571870] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccce2f9f-d796-4a93-8528-6ef6a595f59d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.584031] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31568448-57ec-4252-b817-65dcadf6da6c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.587986] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Waiting for the task: (returnval){ [ 646.587986] env[62522]: value = "task-2414995" [ 646.587986] env[62522]: _type = "Task" [ 646.587986] env[62522]: } 
to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.600097] env[62522]: DEBUG nova.compute.provider_tree [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 646.608899] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Task: {'id': task-2414995, 'name': Rename_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.638855] env[62522]: DEBUG nova.network.neutron [-] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.666376] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "19d3d54c-5ba1-420f-b012-a08add8546c9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.667032] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "19d3d54c-5ba1-420f-b012-a08add8546c9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.701438] env[62522]: DEBUG nova.compute.manager [None req-13cf2a46-4b48-43c0-9c92-e09849af6f31 tempest-ServerDiagnosticsV248Test-1103873209 tempest-ServerDiagnosticsV248Test-1103873209-project-admin] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 646.702850] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc41ed50-c55e-4e3e-bf91-de1b8780fa1a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.713274] env[62522]: INFO nova.compute.manager [None req-13cf2a46-4b48-43c0-9c92-e09849af6f31 tempest-ServerDiagnosticsV248Test-1103873209 tempest-ServerDiagnosticsV248Test-1103873209-project-admin] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Retrieving diagnostics [ 646.713868] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585939d4-a835-44e4-a7dd-5f8981a7ae12 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.747316] env[62522]: DEBUG nova.compute.manager [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 647.106131] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Task: {'id': task-2414995, 'name': Rename_Task, 'duration_secs': 0.376714} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.106131] env[62522]: DEBUG nova.scheduler.client.report [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 647.107808] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 647.108205] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ce02430e-4e2c-4309-9ba3-4ed80e50e91d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.116560] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Waiting for the task: (returnval){ [ 647.116560] env[62522]: value = "task-2414996" [ 647.116560] env[62522]: _type = "Task" [ 647.116560] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.124566] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Task: {'id': task-2414996, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.148911] env[62522]: INFO nova.compute.manager [-] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Took 2.02 seconds to deallocate network for instance. [ 647.149971] env[62522]: DEBUG nova.compute.manager [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 647.183040] env[62522]: DEBUG nova.compute.manager [None req-bd6a585d-61c7-424b-ab68-95c5d2586373 tempest-ServerDiagnosticsTest-979273236 tempest-ServerDiagnosticsTest-979273236-project-admin] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 647.183040] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e680ce-02ef-409d-b232-7a93fd3710a3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.189564] env[62522]: INFO nova.compute.manager [None req-bd6a585d-61c7-424b-ab68-95c5d2586373 tempest-ServerDiagnosticsTest-979273236 tempest-ServerDiagnosticsTest-979273236-project-admin] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Retrieving diagnostics [ 647.192070] env[62522]: DEBUG nova.virt.hardware [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 647.192348] env[62522]: DEBUG nova.virt.hardware [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 647.192506] env[62522]: DEBUG nova.virt.hardware [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 647.192708] env[62522]: DEBUG nova.virt.hardware [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 647.192862] env[62522]: DEBUG nova.virt.hardware [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 647.193124] env[62522]: DEBUG nova.virt.hardware [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 647.193399] env[62522]: DEBUG nova.virt.hardware [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 647.193568] env[62522]: DEBUG nova.virt.hardware [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 647.193784] env[62522]: DEBUG nova.virt.hardware [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 647.193955] env[62522]: DEBUG nova.virt.hardware [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 647.194162] env[62522]: DEBUG nova.virt.hardware [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 647.195072] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-714d1cd1-1875-4b02-a4cf-a738c89686ee {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.199087] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f024957-eabf-4bdc-af58-e9ac938dc06e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.233675] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6478aca-0ac0-4b11-9d57-d7c131df9c91 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.255293] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Instance VIF info [] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 647.260061] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Creating folder: Project (6a1701c3651546a7b5a39bcf8f06cff2). Parent ref: group-v489562. 
{{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 647.263053] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-52c82695-88f9-4440-897e-e2cfc059db45 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.274057] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Created folder: Project (6a1701c3651546a7b5a39bcf8f06cff2) in parent group-v489562. [ 647.274273] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Creating folder: Instances. Parent ref: group-v489584. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 647.274518] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4c331403-032b-4a37-8bbc-e4374a434c7e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.281386] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.284774] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Created folder: Instances in parent group-v489584. [ 647.285026] env[62522]: DEBUG oslo.service.loopingcall [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 647.285223] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 647.285424] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2b953a3a-cafe-4f2c-ba8f-9a164f4e23b8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.303532] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 647.303532] env[62522]: value = "task-2414999" [ 647.303532] env[62522]: _type = "Task" [ 647.303532] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.314142] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2414999, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.617410] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.507s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 647.618038] env[62522]: DEBUG nova.compute.manager [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 647.624022] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.898s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 647.624022] env[62522]: INFO nova.compute.claims [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 647.641427] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Task: {'id': task-2414996, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.662811] env[62522]: DEBUG oslo_concurrency.lockutils [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.820876] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2414999, 'name': CreateVM_Task, 'duration_secs': 0.395292} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.821141] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 647.823059] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.823059] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.823059] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 647.823059] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2b26964-5167-4418-8d1e-2b87a349904f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.829181] env[62522]: DEBUG oslo_vmware.api [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Waiting for the task: (returnval){ [ 647.829181] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520f8833-e3f2-b037-62c2-5794e87af6c3" [ 647.829181] env[62522]: _type = "Task" [ 647.829181] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.839075] env[62522]: DEBUG oslo_vmware.api [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520f8833-e3f2-b037-62c2-5794e87af6c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.888366] env[62522]: DEBUG nova.network.neutron [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Successfully updated port: 13658e84-5e72-4437-ab9f-9ca4363e4eff {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 648.131489] env[62522]: DEBUG oslo_vmware.api [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Task: {'id': task-2414996, 'name': PowerOnVM_Task, 'duration_secs': 0.658096} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.131837] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 648.132078] env[62522]: INFO nova.compute.manager [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Took 11.80 seconds to spawn the instance on the hypervisor. [ 648.132282] env[62522]: DEBUG nova.compute.manager [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 648.134485] env[62522]: DEBUG nova.compute.utils [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 648.139319] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a89d8341-3295-4f4a-912c-b88a6286087e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.142222] env[62522]: DEBUG nova.compute.manager [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 648.142411] env[62522]: DEBUG nova.network.neutron [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 648.271120] env[62522]: DEBUG nova.policy [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b2950283ef08449e9e50edb1dfe828fc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ddb5e4a83f7040088112cdd7aa173257', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 648.342106] env[62522]: DEBUG oslo_vmware.api [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520f8833-e3f2-b037-62c2-5794e87af6c3, 'name': SearchDatastore_Task, 'duration_secs': 0.037958} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.343124] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.343356] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 648.343593] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 648.343739] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.343914] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 648.345546] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a6c2661-bae3-4232-a337-f7757a73c658 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.357205] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 648.357399] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 648.358201] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d84c27d4-8cc0-4484-b650-de5880ef7cb2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.369326] env[62522]: DEBUG oslo_vmware.api [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Waiting for the task: (returnval){ [ 648.369326] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520878d8-b542-6031-2f72-87e786b283cf" [ 648.369326] env[62522]: _type = "Task" [ 648.369326] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.380698] env[62522]: DEBUG oslo_vmware.api [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520878d8-b542-6031-2f72-87e786b283cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.394296] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Acquiring lock "refresh_cache-a5657a70-5374-4d52-be9a-2d05f9556d16" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 648.394296] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Acquired lock "refresh_cache-a5657a70-5374-4d52-be9a-2d05f9556d16" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.394296] env[62522]: DEBUG nova.network.neutron [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 648.637272] env[62522]: DEBUG nova.compute.manager [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 648.666028] env[62522]: INFO nova.compute.manager [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Took 25.61 seconds to build instance. 
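The CreateVM_Task and SearchDatastore_Task entries above follow oslo.vmware's invoke-and-poll pattern: a *_Task method returns a Task managed object reference immediately, and wait_for_task then polls it until it succeeds or fails, producing the "_poll_task ... progress is N%" lines. A minimal sketch of that pattern, assuming an already-created oslo_vmware.api.VMwareAPISession named session; the wrapper function name and argument variables are illustrative, not Nova's actual code.

def create_vm_sketch(session, folder_ref, config_spec, resource_pool_ref, host_ref=None):
    # Returns a Task managed object reference right away; the VM does not
    # exist yet at this point.
    task_ref = session.invoke_api(session.vim, "CreateVM_Task", folder_ref,
                                  config=config_spec, pool=resource_pool_ref,
                                  host=host_ref)
    # Blocks, polling the task on an interval until vCenter reports success;
    # raises if the task ends in an error state.
    task_info = session.wait_for_task(task_ref)
    return task_info.result  # managed object reference of the new VM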
[ 648.829698] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Acquiring lock "c8779822-1694-463e-bd06-5f84d867d1bd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.831723] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Lock "c8779822-1694-463e-bd06-5f84d867d1bd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.831723] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Acquiring lock "c8779822-1694-463e-bd06-5f84d867d1bd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.831723] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Lock "c8779822-1694-463e-bd06-5f84d867d1bd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.831723] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Lock "c8779822-1694-463e-bd06-5f84d867d1bd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 648.833035] env[62522]: INFO nova.compute.manager [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Terminating instance [ 648.888304] env[62522]: DEBUG oslo_vmware.api [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520878d8-b542-6031-2f72-87e786b283cf, 'name': SearchDatastore_Task, 'duration_secs': 0.035687} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.889368] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05e0cf81-5da5-4a22-878b-48c0a51a8806 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.899667] env[62522]: DEBUG oslo_vmware.api [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Waiting for the task: (returnval){ [ 648.899667] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526d935c-49e1-76a4-bb54-73d5213345b6" [ 648.899667] env[62522]: _type = "Task" [ 648.899667] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.913470] env[62522]: DEBUG oslo_vmware.api [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526d935c-49e1-76a4-bb54-73d5213345b6, 'name': SearchDatastore_Task, 'duration_secs': 0.009542} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.916487] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.916703] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 9a098809-cc26-4210-b09e-b7825c406294/9a098809-cc26-4210-b09e-b7825c406294.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 648.917198] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0cbcffa2-b7d8-4829-a0cc-b75b727d711d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.925794] env[62522]: DEBUG oslo_vmware.api [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Waiting for the task: (returnval){ [ 648.925794] env[62522]: value = "task-2415000" [ 648.925794] env[62522]: _type = "Task" [ 648.925794] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.939271] env[62522]: DEBUG oslo_vmware.api [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415000, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.000027] env[62522]: DEBUG nova.network.neutron [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 649.004122] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-633365d4-b544-4a09-a264-26f6ebc3425d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.012648] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f29fd4f3-fcb4-40c2-b58b-fd6c25b2999f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.056455] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d1834fc-967f-4059-9600-386b1e3a8896 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.066156] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0396a26d-5bfb-47bf-bef0-3012f63d6714 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.083637] env[62522]: DEBUG nova.compute.provider_tree [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 649.169231] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcff25d2-3329-431f-b5c4-a0f857d7bcc8 tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Lock "678b6b5f-b410-4c55-872e-4a74da6d7ebc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.138s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.341346] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Acquiring lock "refresh_cache-c8779822-1694-463e-bd06-5f84d867d1bd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 649.341346] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Acquired lock "refresh_cache-c8779822-1694-463e-bd06-5f84d867d1bd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.341346] env[62522]: DEBUG nova.network.neutron [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Building network info cache for instance {{(pid=62522) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 649.343008] env[62522]: DEBUG nova.network.neutron [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Successfully created port: 84b3fbe6-d792-4953-8bdc-9befaa3ed8f5 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 649.439261] env[62522]: DEBUG oslo_vmware.api [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415000, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.465073} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.439591] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 9a098809-cc26-4210-b09e-b7825c406294/9a098809-cc26-4210-b09e-b7825c406294.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 649.439747] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 649.440043] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-387c1cbb-f4d8-4532-9440-5cc21c655ad4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.447923] env[62522]: DEBUG oslo_vmware.api [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Waiting for the task: (returnval){ [ 649.447923] env[62522]: value = "task-2415001" [ 649.447923] env[62522]: _type = "Task" [ 649.447923] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.457296] env[62522]: DEBUG oslo_vmware.api [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415001, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.474061] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "c181ce48-9fe2-4400-9047-f8b5a7159dd3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.474585] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "c181ce48-9fe2-4400-9047-f8b5a7159dd3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.588898] env[62522]: DEBUG nova.scheduler.client.report [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 649.591806] env[62522]: DEBUG nova.network.neutron [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Updating instance_info_cache with network_info: [{"id": "13658e84-5e72-4437-ab9f-9ca4363e4eff", "address": "fa:16:3e:b4:e4:31", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.91", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13658e84-5e", "ovs_interfaceid": "13658e84-5e72-4437-ab9f-9ca4363e4eff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.656027] env[62522]: DEBUG nova.compute.manager [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 
tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 649.672294] env[62522]: DEBUG nova.compute.manager [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 649.694318] env[62522]: DEBUG nova.virt.hardware [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 649.694587] env[62522]: DEBUG nova.virt.hardware [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 649.694698] env[62522]: DEBUG nova.virt.hardware [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 649.694886] env[62522]: DEBUG nova.virt.hardware [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 649.695047] env[62522]: DEBUG nova.virt.hardware [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 649.695195] env[62522]: DEBUG nova.virt.hardware [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 649.695396] env[62522]: DEBUG nova.virt.hardware [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 649.695546] env[62522]: DEBUG nova.virt.hardware [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 649.695704] env[62522]: DEBUG nova.virt.hardware [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 649.695923] env[62522]: DEBUG nova.virt.hardware [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 649.696062] env[62522]: DEBUG nova.virt.hardware [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 649.698364] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a07754f1-deb1-4e94-a27e-ee5adfd5e68c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.710600] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f0e9f1-2131-4ba7-af91-5e1d4a64e450 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.898311] env[62522]: DEBUG nova.network.neutron [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 649.959890] env[62522]: DEBUG oslo_vmware.api [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415001, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081283} completed successfully. 
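The hardware records above walk the m1.nano flavor (1 vCPU, no flavor or image topology limits) through Nova's topology search and land on the single candidate 1:1:1. A minimal re-derivation of that enumeration is sketched below; the brute-force search and the names used are illustrative assumptions, not Nova's _get_possible_cpu_topologies implementation.

# Illustrative sketch only: enumerate the (sockets, cores, threads) splits
# whose product equals the vCPU count, within the effective maxima of 65536
# reported above when neither flavor nor image sets a limit.
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    yield VirtCPUTopology(sockets, cores, threads)

print(list(possible_topologies(1)))
# -> [VirtCPUTopology(sockets=1, cores=1, threads=1)], matching
#    "Got 1 possible topologies" / "Sorted desired topologies" above.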
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.960100] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 649.961254] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c583d1c5-df30-4611-ae48-0dec98664d9d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.981788] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] 9a098809-cc26-4210-b09e-b7825c406294/9a098809-cc26-4210-b09e-b7825c406294.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 649.982106] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e9b9f502-6a7f-4a7e-aea9-6988bd968681 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.001523] env[62522]: DEBUG oslo_vmware.api [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Waiting for the task: (returnval){ [ 650.001523] env[62522]: value = "task-2415002" [ 650.001523] env[62522]: _type = "Task" [ 650.001523] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.007361] env[62522]: DEBUG nova.network.neutron [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.012118] env[62522]: DEBUG oslo_vmware.api [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415002, 'name': ReconfigVM_Task} progress is 6%. 
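The ExtendVirtualDisk_Task and ReconfigVM_Task records above follow the same shape: a vSphere *_Task method is invoked, the returned Task object is polled ("progress is N%"), and the result is logged once complete. A hedged sketch of that pattern with oslo.vmware's public session API follows; session is assumed to be an existing oslo_vmware.api.VMwareAPISession, and vm_ref/reconfig_spec are placeholders rather than values from this log.

def reconfigure_and_wait(session, vm_ref, reconfig_spec):
    # Start the asynchronous reconfigure; vCenter returns a Task moref.
    task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                              vm_ref, spec=reconfig_spec)
    # wait_for_task() polls TaskInfo and produces the
    # "Task: {...} progress is N%" / "completed successfully" records above.
    return session.wait_for_task(task)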
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.017035] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Acquiring lock "253a2903-2601-4f0a-8882-e7510406f9d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.017255] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Lock "253a2903-2601-4f0a-8882-e7510406f9d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.093872] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.473s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 650.094443] env[62522]: DEBUG nova.compute.manager [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Start building networks asynchronously for instance. 
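The lockutils records here ("Acquiring lock ... by ...", "acquired ... waited 0.000s", "released ... held 2.473s") come from oslo.concurrency's in-process locks: one named after the instance UUID around the whole build, and a shared "compute_resources" lock around resource-tracker claims. A minimal sketch of that pattern follows; the function bodies are placeholders and only the lock names mirror the log.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim():
    # Serialized with every other claim on this host, which is why the
    # surrounding records show non-zero wait and hold times under load.
    pass

def locked_build(instance_uuid):
    # Per-instance lock, analogous to the
    # "..._locked_do_build_and_run_instance" lock acquired above.
    with lockutils.lock(instance_uuid):
        instance_claim()

locked_build('c181ce48-9fe2-4400-9047-f8b5a7159dd3')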
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 650.097575] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Releasing lock "refresh_cache-a5657a70-5374-4d52-be9a-2d05f9556d16" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.097830] env[62522]: DEBUG nova.compute.manager [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Instance network_info: |[{"id": "13658e84-5e72-4437-ab9f-9ca4363e4eff", "address": "fa:16:3e:b4:e4:31", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.91", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13658e84-5e", "ovs_interfaceid": "13658e84-5e72-4437-ab9f-9ca4363e4eff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 650.098341] env[62522]: DEBUG oslo_concurrency.lockutils [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.331s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.099725] env[62522]: INFO nova.compute.claims [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 650.106268] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:e4:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f6d1427-d86b-4371-9172-50e4bb0eb1cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '13658e84-5e72-4437-ab9f-9ca4363e4eff', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 650.110612] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 
tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Creating folder: Project (cbd92adc76814720ac43cd9c99d21209). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 650.111253] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-69ffd4bd-12c6-4389-8f87-b4981976b91f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.121733] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Created folder: Project (cbd92adc76814720ac43cd9c99d21209) in parent group-v489562. [ 650.122710] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Creating folder: Instances. Parent ref: group-v489587. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 650.122940] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d63f6e09-86ca-4863-8021-07201aaa578a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.133534] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Created folder: Instances in parent group-v489587. [ 650.133534] env[62522]: DEBUG oslo.service.loopingcall [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 650.133534] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 650.133534] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bdb3abeb-d4c0-4a57-a3f4-85594b5fa089 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.151667] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 650.151667] env[62522]: value = "task-2415005" [ 650.151667] env[62522]: _type = "Task" [ 650.151667] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.160210] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415005, 'name': CreateVM_Task} progress is 0%. 
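A few records up, the Neutron network_info for instance a5657a70 is reduced to the compact "Instance VIF info" the VMware driver feeds into VM creation: bridge name, MAC, an OpaqueNetwork reference keyed on the NSX logical-switch id, the Neutron port id, and the vmxnet3 model. The sketch below performs that reduction using only keys visible in these records; it is a simplified stand-in, not the driver's own translation code.

def to_vif_info(network_info, vif_model='vmxnet3'):
    vif_infos = []
    for vif in network_info:
        details = vif.get('details', {})
        vif_infos.append({
            'network_name': vif['network']['bridge'],      # e.g. 'br-int'
            'mac_address': vif['address'],                  # e.g. 'fa:16:3e:b4:e4:31'
            'network_ref': {'type': 'OpaqueNetwork',
                            'network-id': details.get('nsx-logical-switch-id'),
                            'network-type': 'nsx.LogicalSwitch',
                            'use-external-id': True},
            'iface_id': vif['id'],                          # Neutron port UUID
            'vif_model': vif_model,
        })
    return vif_infos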
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.199215] env[62522]: DEBUG nova.compute.manager [req-9dab7581-2342-494f-b9ea-4a12eb4ed47e req-34942558-5b5d-4ab0-a88d-13d836f72146 service nova] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Received event network-vif-deleted-7268d88d-66d8-4214-a46c-9f03f18f95cb {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 650.199497] env[62522]: DEBUG nova.compute.manager [req-9dab7581-2342-494f-b9ea-4a12eb4ed47e req-34942558-5b5d-4ab0-a88d-13d836f72146 service nova] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Received event network-vif-plugged-13658e84-5e72-4437-ab9f-9ca4363e4eff {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 650.199769] env[62522]: DEBUG oslo_concurrency.lockutils [req-9dab7581-2342-494f-b9ea-4a12eb4ed47e req-34942558-5b5d-4ab0-a88d-13d836f72146 service nova] Acquiring lock "a5657a70-5374-4d52-be9a-2d05f9556d16-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.199995] env[62522]: DEBUG oslo_concurrency.lockutils [req-9dab7581-2342-494f-b9ea-4a12eb4ed47e req-34942558-5b5d-4ab0-a88d-13d836f72146 service nova] Lock "a5657a70-5374-4d52-be9a-2d05f9556d16-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.200176] env[62522]: DEBUG oslo_concurrency.lockutils [req-9dab7581-2342-494f-b9ea-4a12eb4ed47e req-34942558-5b5d-4ab0-a88d-13d836f72146 service nova] Lock "a5657a70-5374-4d52-be9a-2d05f9556d16-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 650.200367] env[62522]: DEBUG nova.compute.manager [req-9dab7581-2342-494f-b9ea-4a12eb4ed47e req-34942558-5b5d-4ab0-a88d-13d836f72146 service nova] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] No waiting events found dispatching network-vif-plugged-13658e84-5e72-4437-ab9f-9ca4363e4eff {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 650.200540] env[62522]: WARNING nova.compute.manager [req-9dab7581-2342-494f-b9ea-4a12eb4ed47e req-34942558-5b5d-4ab0-a88d-13d836f72146 service nova] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Received unexpected event network-vif-plugged-13658e84-5e72-4437-ab9f-9ca4363e4eff for instance with vm_state building and task_state spawning. [ 650.200678] env[62522]: DEBUG nova.compute.manager [req-9dab7581-2342-494f-b9ea-4a12eb4ed47e req-34942558-5b5d-4ab0-a88d-13d836f72146 service nova] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Received event network-changed-13658e84-5e72-4437-ab9f-9ca4363e4eff {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 650.200824] env[62522]: DEBUG nova.compute.manager [req-9dab7581-2342-494f-b9ea-4a12eb4ed47e req-34942558-5b5d-4ab0-a88d-13d836f72146 service nova] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Refreshing instance network info cache due to event network-changed-13658e84-5e72-4437-ab9f-9ca4363e4eff. 
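The req-9dab7581 records above show the external-event plumbing: Neutron reports network-vif-plugged / network-changed for port 13658e84, and because nothing is waiting yet the compute manager logs "No waiting events found dispatching" and flags the event as unexpected. The snippet below is a deliberately simplified model of that pattern (register an expectation, pop it when the notification arrives), not Nova's InstanceEvents implementation.

import threading

class SimpleInstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()
        self._events = {}                      # (instance_uuid, event) -> Event

    def prepare(self, instance_uuid, event_name):
        with self._lock:
            ev = threading.Event()
            self._events[(instance_uuid, event_name)] = ev
            return ev                          # the spawn path waits on this

    def pop(self, instance_uuid, event_name):
        with self._lock:
            return self._events.pop((instance_uuid, event_name), None)

def on_external_event(events, instance_uuid, event_name):
    waiter = events.pop(instance_uuid, event_name)
    if waiter is None:
        # Mirrors the WARNING "Received unexpected event ..." above.
        print('unexpected event %s for %s' % (event_name, instance_uuid))
    else:
        waiter.set()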
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 650.201019] env[62522]: DEBUG oslo_concurrency.lockutils [req-9dab7581-2342-494f-b9ea-4a12eb4ed47e req-34942558-5b5d-4ab0-a88d-13d836f72146 service nova] Acquiring lock "refresh_cache-a5657a70-5374-4d52-be9a-2d05f9556d16" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.201141] env[62522]: DEBUG oslo_concurrency.lockutils [req-9dab7581-2342-494f-b9ea-4a12eb4ed47e req-34942558-5b5d-4ab0-a88d-13d836f72146 service nova] Acquired lock "refresh_cache-a5657a70-5374-4d52-be9a-2d05f9556d16" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.201308] env[62522]: DEBUG nova.network.neutron [req-9dab7581-2342-494f-b9ea-4a12eb4ed47e req-34942558-5b5d-4ab0-a88d-13d836f72146 service nova] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Refreshing network info cache for port 13658e84-5e72-4437-ab9f-9ca4363e4eff {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 650.203304] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Acquiring lock "758ed671-347a-4949-9842-2f8cdcd261ae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.203470] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Lock "758ed671-347a-4949-9842-2f8cdcd261ae" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.203884] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Acquiring lock "758ed671-347a-4949-9842-2f8cdcd261ae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.203884] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Lock "758ed671-347a-4949-9842-2f8cdcd261ae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.204106] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Lock "758ed671-347a-4949-9842-2f8cdcd261ae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 650.206101] env[62522]: INFO nova.compute.manager [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 
tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Terminating instance [ 650.208399] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.515413] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Releasing lock "refresh_cache-c8779822-1694-463e-bd06-5f84d867d1bd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.515413] env[62522]: DEBUG nova.compute.manager [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 650.515413] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 650.515712] env[62522]: DEBUG oslo_vmware.api [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415002, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.516630] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca88031d-d652-480f-984e-4312786c1d72 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.526040] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 650.526040] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e292fe05-602b-4e45-a5e9-cbf7367c64a2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.531781] env[62522]: DEBUG oslo_vmware.api [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Waiting for the task: (returnval){ [ 650.531781] env[62522]: value = "task-2415006" [ 650.531781] env[62522]: _type = "Task" [ 650.531781] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.545293] env[62522]: DEBUG oslo_vmware.api [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': task-2415006, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.599926] env[62522]: DEBUG nova.compute.utils [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 650.602282] env[62522]: DEBUG nova.compute.manager [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 650.605018] env[62522]: DEBUG nova.network.neutron [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 650.668122] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415005, 'name': CreateVM_Task, 'duration_secs': 0.421809} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.668388] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 650.669214] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.669343] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.669658] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 650.669914] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a52d144-b3ba-489f-9513-62f77b4ca902 {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.674754] env[62522]: DEBUG oslo_vmware.api [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Waiting for the task: (returnval){ [ 650.674754] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52784628-8b44-9d22-40b1-81237c813b5b" [ 650.674754] env[62522]: _type = "Task" [ 650.674754] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.676178] env[62522]: DEBUG nova.policy [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd228928a029446b1905f0f711732120a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '83e5b02095fc42ea9b8a2fb7c4900fe3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 650.687270] env[62522]: DEBUG oslo_vmware.api [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52784628-8b44-9d22-40b1-81237c813b5b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.702124] env[62522]: DEBUG nova.compute.manager [None req-52ef49f4-2c3e-43da-9d85-64d29939a849 tempest-ServerExternalEventsTest-551886804 tempest-ServerExternalEventsTest-551886804-project] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Received event network-changed {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 650.702481] env[62522]: DEBUG nova.compute.manager [None req-52ef49f4-2c3e-43da-9d85-64d29939a849 tempest-ServerExternalEventsTest-551886804 tempest-ServerExternalEventsTest-551886804-project] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Refreshing instance network info cache due to event network-changed. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 650.702872] env[62522]: DEBUG oslo_concurrency.lockutils [None req-52ef49f4-2c3e-43da-9d85-64d29939a849 tempest-ServerExternalEventsTest-551886804 tempest-ServerExternalEventsTest-551886804-project] Acquiring lock "refresh_cache-678b6b5f-b410-4c55-872e-4a74da6d7ebc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.702872] env[62522]: DEBUG oslo_concurrency.lockutils [None req-52ef49f4-2c3e-43da-9d85-64d29939a849 tempest-ServerExternalEventsTest-551886804 tempest-ServerExternalEventsTest-551886804-project] Acquired lock "refresh_cache-678b6b5f-b410-4c55-872e-4a74da6d7ebc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.703023] env[62522]: DEBUG nova.network.neutron [None req-52ef49f4-2c3e-43da-9d85-64d29939a849 tempest-ServerExternalEventsTest-551886804 tempest-ServerExternalEventsTest-551886804-project] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 650.712743] env[62522]: DEBUG nova.compute.manager [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 650.712743] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 650.716326] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf4e7f2-db51-4a09-aaf2-f1bd495968af {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.725558] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 650.726649] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-caa2a5ba-072f-437a-8d59-6d9cb35d4d44 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.734924] env[62522]: DEBUG oslo_vmware.api [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Waiting for the task: (returnval){ [ 650.734924] env[62522]: value = "task-2415007" [ 650.734924] env[62522]: _type = "Task" [ 650.734924] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.744842] env[62522]: DEBUG oslo_vmware.api [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Task: {'id': task-2415007, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.836901] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Acquiring lock "17e1557d-e4cf-45b0-84da-4cbcffe31fb6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.836901] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Lock "17e1557d-e4cf-45b0-84da-4cbcffe31fb6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 651.013750] env[62522]: DEBUG oslo_vmware.api [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415002, 'name': ReconfigVM_Task, 'duration_secs': 0.590848} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.014097] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Reconfigured VM instance instance-0000000a to attach disk [datastore2] 9a098809-cc26-4210-b09e-b7825c406294/9a098809-cc26-4210-b09e-b7825c406294.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 651.014724] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e8a1884-8426-41a5-9708-fc94aa58ea27 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.023176] env[62522]: DEBUG oslo_vmware.api [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Waiting for the task: (returnval){ [ 651.023176] env[62522]: value = "task-2415008" [ 651.023176] env[62522]: _type = "Task" [ 651.023176] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.035342] env[62522]: DEBUG oslo_vmware.api [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415008, 'name': Rename_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.044622] env[62522]: DEBUG oslo_vmware.api [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': task-2415006, 'name': PowerOffVM_Task, 'duration_secs': 0.135225} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.045219] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 651.045534] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 651.045806] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-18c9003d-0980-45a1-97ce-77eb0021b446 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.050036] env[62522]: DEBUG nova.network.neutron [req-9dab7581-2342-494f-b9ea-4a12eb4ed47e req-34942558-5b5d-4ab0-a88d-13d836f72146 service nova] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Updated VIF entry in instance network info cache for port 13658e84-5e72-4437-ab9f-9ca4363e4eff. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 651.050036] env[62522]: DEBUG nova.network.neutron [req-9dab7581-2342-494f-b9ea-4a12eb4ed47e req-34942558-5b5d-4ab0-a88d-13d836f72146 service nova] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Updating instance_info_cache with network_info: [{"id": "13658e84-5e72-4437-ab9f-9ca4363e4eff", "address": "fa:16:3e:b4:e4:31", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.91", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13658e84-5e", "ovs_interfaceid": "13658e84-5e72-4437-ab9f-9ca4363e4eff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.073892] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 651.074167] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: 
c8779822-1694-463e-bd06-5f84d867d1bd] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 651.074325] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Deleting the datastore file [datastore2] c8779822-1694-463e-bd06-5f84d867d1bd {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 651.075031] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a313938d-84a9-48be-9732-67734bb17c9a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.085147] env[62522]: DEBUG oslo_vmware.api [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Waiting for the task: (returnval){ [ 651.085147] env[62522]: value = "task-2415010" [ 651.085147] env[62522]: _type = "Task" [ 651.085147] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.097675] env[62522]: DEBUG oslo_vmware.api [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': task-2415010, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.105649] env[62522]: DEBUG nova.compute.manager [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 651.194261] env[62522]: DEBUG oslo_vmware.api [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52784628-8b44-9d22-40b1-81237c813b5b, 'name': SearchDatastore_Task, 'duration_secs': 0.043033} completed successfully. 
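Disk, instance-folder and image-cache paths in these records all use vSphere's "[datastore] relative/path" notation ("[datastore2] 9a098809-.../9a098809-....vmdk", "[datastore1] 758ed671-...", "[datastore2] devstack-image-cache_base"). A tiny illustrative helper for that notation, assumed for the example rather than taken from oslo.vmware:

def ds_path(datastore, *parts):
    # "[<datastore>] part/part/..." as printed throughout these records.
    return '[%s] %s' % (datastore, '/'.join(parts))

uuid = '9a098809-cc26-4210-b09e-b7825c406294'
print(ds_path('datastore2', uuid, uuid + '.vmdk'))
# [datastore2] 9a098809-cc26-4210-b09e-b7825c406294/9a098809-cc26-4210-b09e-b7825c406294.vmdk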
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.199679] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 651.199943] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 651.200206] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 651.200346] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.200522] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 651.201458] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dbd88738-c58c-42cd-9c94-08df12037074 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.211895] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 651.213331] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Folder [datastore2] devstack-image-cache_base created. 
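The image-cache records (acquiring the "[datastore2] devstack-image-cache_base/2ee4561b-..." locks, "Processing image", creating the cache folder, then SearchDatastore_Task) follow a double-checked pattern: lock on the cached VMDK path, look for it on the datastore, and fetch it only if missing. The sketch below captures the shape of that flow under stated assumptions: find_file and fetch_image are assumed helper callables, and the real driver does considerably more bookkeeping.

from oslo_concurrency import lockutils

def ensure_cached_image(session, image_id, cache_dir, find_file, fetch_image):
    cached_vmdk = '%s/%s/%s.vmdk' % (cache_dir, image_id, image_id)
    with lockutils.lock(cached_vmdk):
        if not find_file(session, cached_vmdk):   # SearchDatastore_Task above
            fetch_image(session, image_id, cached_vmdk)
    return cached_vmdk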
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 651.215159] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f3b39de-6a4e-42b0-9a38-97b2c0d8477c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.222277] env[62522]: DEBUG oslo_vmware.api [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Waiting for the task: (returnval){ [ 651.222277] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b1865e-9bea-abc4-9b6a-5c4957bba9a5" [ 651.222277] env[62522]: _type = "Task" [ 651.222277] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.234844] env[62522]: DEBUG oslo_vmware.api [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b1865e-9bea-abc4-9b6a-5c4957bba9a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.242800] env[62522]: DEBUG oslo_vmware.api [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Task: {'id': task-2415007, 'name': PowerOffVM_Task, 'duration_secs': 0.3547} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.245357] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 651.245449] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 651.246576] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-706c95ec-f7db-4794-9a13-28d47bdbe933 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.311684] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 651.312732] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 651.312937] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Deleting the datastore file [datastore1] 758ed671-347a-4949-9842-2f8cdcd261ae {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 651.313286] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-59b2de2c-23d2-40fd-b795-f961d53704f0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.328398] env[62522]: DEBUG oslo_vmware.api [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Waiting for the task: (returnval){ [ 651.328398] env[62522]: value = "task-2415012" [ 651.328398] env[62522]: _type = "Task" [ 651.328398] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.338588] env[62522]: DEBUG oslo_vmware.api [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Task: {'id': task-2415012, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.538213] env[62522]: DEBUG oslo_vmware.api [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415008, 'name': Rename_Task, 'duration_secs': 0.174947} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.538213] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 651.538213] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dddd11f4-cce8-4270-8b49-ebfcdcbe1be9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.550486] env[62522]: DEBUG oslo_vmware.api [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Waiting for the task: (returnval){ [ 651.550486] env[62522]: value = "task-2415013" [ 651.550486] env[62522]: _type = "Task" [ 651.550486] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.558485] env[62522]: DEBUG oslo_concurrency.lockutils [req-9dab7581-2342-494f-b9ea-4a12eb4ed47e req-34942558-5b5d-4ab0-a88d-13d836f72146 service nova] Releasing lock "refresh_cache-a5657a70-5374-4d52-be9a-2d05f9556d16" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 651.558927] env[62522]: DEBUG oslo_vmware.api [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415013, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.608690] env[62522]: DEBUG oslo_vmware.api [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Task: {'id': task-2415010, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165042} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.609841] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 651.610145] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 651.610286] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 651.610474] env[62522]: INFO nova.compute.manager [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Took 1.10 seconds to destroy the instance on the hypervisor. [ 651.610672] env[62522]: DEBUG oslo.service.loopingcall [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
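The "Waiting for function ..._deallocate_network_with_retries to return" records come from an oslo.service looping call wrapped around network deallocation so transient Neutron failures can be retried. Nova uses a back-off variant there; the sketch below shows the simpler fixed-interval form of the same mechanism, with deallocate() as an assumed callable that returns True once Neutron has finished.

from oslo_service import loopingcall

def wait_for_deallocate(deallocate):
    def _poll():
        if deallocate():
            raise loopingcall.LoopingCallDone()   # stops the loop
    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    timer.start(interval=1.0).wait()              # blocks until done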
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 651.611438] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-361c1395-56f3-4636-a93d-c25c781f822d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.617556] env[62522]: DEBUG nova.compute.manager [-] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 651.619217] env[62522]: DEBUG nova.network.neutron [-] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 651.627392] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2630486c-48f2-4490-8884-fc28b0eb4a3e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.668814] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9eda562-a82b-4892-80e0-d85f715db034 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.679542] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59afd8f9-e312-474b-9d3e-2694a1c880b1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.697220] env[62522]: DEBUG nova.compute.provider_tree [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 651.699478] env[62522]: DEBUG nova.network.neutron [-] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 651.701097] env[62522]: DEBUG nova.network.neutron [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Successfully updated port: 84b3fbe6-d792-4953-8bdc-9befaa3ed8f5 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 651.737406] env[62522]: DEBUG oslo_vmware.api [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b1865e-9bea-abc4-9b6a-5c4957bba9a5, 'name': SearchDatastore_Task, 'duration_secs': 0.025079} completed successfully. 
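"Inventory has not changed" here refers back to the provider inventory reported earlier in these records. The usual Placement arithmetic applies: schedulable capacity is (total - reserved) * allocation_ratio, while max_unit caps any single allocation. A worked example with the numbers from this log (not Nova code):

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 149},
}
for rc, inv in inventory.items():
    capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    print('%s: capacity %d, per-allocation max %d' % (rc, capacity, inv['max_unit']))
# VCPU: capacity 192, per-allocation max 16
# MEMORY_MB: capacity 196078, per-allocation max 65530
# DISK_GB: capacity 400, per-allocation max 149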
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.738255] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ceec3dc-5190-482b-af31-9f84cc42fa42 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.743699] env[62522]: DEBUG oslo_vmware.api [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Waiting for the task: (returnval){ [ 651.743699] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52756357-bf5d-537f-e590-4e102bb5be65" [ 651.743699] env[62522]: _type = "Task" [ 651.743699] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.751767] env[62522]: DEBUG nova.network.neutron [None req-52ef49f4-2c3e-43da-9d85-64d29939a849 tempest-ServerExternalEventsTest-551886804 tempest-ServerExternalEventsTest-551886804-project] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Updating instance_info_cache with network_info: [{"id": "20713b6e-4b87-4065-a83b-f62812551cd5", "address": "fa:16:3e:a5:e2:12", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.43", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20713b6e-4b", "ovs_interfaceid": "20713b6e-4b87-4065-a83b-f62812551cd5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.755086] env[62522]: DEBUG oslo_vmware.api [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52756357-bf5d-537f-e590-4e102bb5be65, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.829183] env[62522]: DEBUG nova.network.neutron [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Successfully created port: fd0b859a-1918-4692-a81c-b2b0e41951a5 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 651.843352] env[62522]: DEBUG oslo_vmware.api [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Task: {'id': task-2415012, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.389956} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.843507] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 651.843686] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 651.843904] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 651.844252] env[62522]: INFO nova.compute.manager [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Took 1.13 seconds to destroy the instance on the hypervisor. [ 651.844340] env[62522]: DEBUG oslo.service.loopingcall [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 651.844468] env[62522]: DEBUG nova.compute.manager [-] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 651.844563] env[62522]: DEBUG nova.network.neutron [-] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 652.061337] env[62522]: DEBUG oslo_vmware.api [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415013, 'name': PowerOnVM_Task} progress is 76%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.073770] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquiring lock "cce5f0d4-364d-4295-a27d-44ca8585f803" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.074304] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Lock "cce5f0d4-364d-4295-a27d-44ca8585f803" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.121923] env[62522]: DEBUG nova.compute.manager [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 652.153253] env[62522]: DEBUG nova.virt.hardware [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 652.153537] env[62522]: DEBUG nova.virt.hardware [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 652.153646] env[62522]: DEBUG nova.virt.hardware [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 652.153825] env[62522]: DEBUG nova.virt.hardware [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 652.153972] env[62522]: DEBUG nova.virt.hardware [None 
req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 652.154138] env[62522]: DEBUG nova.virt.hardware [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 652.154350] env[62522]: DEBUG nova.virt.hardware [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 652.154704] env[62522]: DEBUG nova.virt.hardware [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 652.154976] env[62522]: DEBUG nova.virt.hardware [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 652.155185] env[62522]: DEBUG nova.virt.hardware [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 652.155362] env[62522]: DEBUG nova.virt.hardware [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 652.156532] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d409e067-ab5f-423e-9388-e09399b8b63a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.165957] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18de65a3-36cb-4e46-b28b-abd6d1ac5e81 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.205045] env[62522]: DEBUG nova.scheduler.client.report [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 
1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 652.210310] env[62522]: DEBUG nova.network.neutron [-] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.211144] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Acquiring lock "refresh_cache-84ad5317-344d-44c1-9318-fa1574321296" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 652.211381] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Acquired lock "refresh_cache-84ad5317-344d-44c1-9318-fa1574321296" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.211462] env[62522]: DEBUG nova.network.neutron [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 652.262225] env[62522]: DEBUG oslo_concurrency.lockutils [None req-52ef49f4-2c3e-43da-9d85-64d29939a849 tempest-ServerExternalEventsTest-551886804 tempest-ServerExternalEventsTest-551886804-project] Releasing lock "refresh_cache-678b6b5f-b410-4c55-872e-4a74da6d7ebc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 652.262768] env[62522]: DEBUG oslo_vmware.api [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52756357-bf5d-537f-e590-4e102bb5be65, 'name': SearchDatastore_Task, 'duration_secs': 0.016609} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.263050] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 652.263368] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] a5657a70-5374-4d52-be9a-2d05f9556d16/a5657a70-5374-4d52-be9a-2d05f9556d16.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 652.263658] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4b114207-5a79-49fe-a26c-5fb8c2515886 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.270879] env[62522]: DEBUG oslo_vmware.api [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Waiting for the task: (returnval){ [ 652.270879] env[62522]: value = "task-2415014" [ 652.270879] env[62522]: _type = "Task" [ 652.270879] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.286308] env[62522]: DEBUG oslo_vmware.api [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Task: {'id': task-2415014, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.562104] env[62522]: DEBUG oslo_vmware.api [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415013, 'name': PowerOnVM_Task, 'duration_secs': 0.847858} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.562104] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 652.562104] env[62522]: INFO nova.compute.manager [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Took 5.41 seconds to spawn the instance on the hypervisor. 
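The repeated "Task: {...} progress is N%" and "completed successfully" entries above are produced by oslo.vmware's task polling. A minimal, hypothetical sketch of that pattern follows; the vCenter host, credentials, and the 'vm-123' moref are placeholders, not values taken from this log:

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Hypothetical vCenter connection; purely illustrative.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Build a managed object reference for an assumed VM id.
    vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

    # invoke_api issues the SOAP call (e.g. PowerOnVM_Task) and returns a
    # Task moref; wait_for_task then polls it until success or error, which
    # is what emits the "_poll_task ... progress is N%" lines seen above.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)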
[ 652.564153] env[62522]: DEBUG nova.compute.manager [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 652.564153] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce7329ab-3356-4496-9f5d-cfdc8da741a1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.716987] env[62522]: DEBUG oslo_concurrency.lockutils [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.618s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 652.717576] env[62522]: DEBUG nova.compute.manager [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 652.720180] env[62522]: INFO nova.compute.manager [-] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Took 1.10 seconds to deallocate network for instance. [ 652.722865] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.604s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.723146] env[62522]: DEBUG nova.objects.instance [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] Lazy-loading 'resources' on Instance uuid a3830103-2dcb-40ac-8e62-b331fe4673ff {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 652.784309] env[62522]: DEBUG oslo_vmware.api [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Task: {'id': task-2415014, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.793788] env[62522]: DEBUG nova.network.neutron [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 652.919515] env[62522]: DEBUG nova.network.neutron [-] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.998116] env[62522]: DEBUG nova.network.neutron [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Updating instance_info_cache with network_info: [{"id": "84b3fbe6-d792-4953-8bdc-9befaa3ed8f5", "address": "fa:16:3e:20:a8:fe", "network": {"id": "057a8a28-3749-4366-abe1-1a10f81258ef", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-656976695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ddb5e4a83f7040088112cdd7aa173257", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cc448a80-6318-4b6a-b0a0-85fe6cc645df", "external-id": "nsx-vlan-transportzone-91", "segmentation_id": 91, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84b3fbe6-d7", "ovs_interfaceid": "84b3fbe6-d792-4953-8bdc-9befaa3ed8f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.044022] env[62522]: DEBUG oslo_concurrency.lockutils [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "cd69a052-369b-4809-baf0-a1aec44f4ab5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.044157] env[62522]: DEBUG oslo_concurrency.lockutils [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "cd69a052-369b-4809-baf0-a1aec44f4ab5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.084114] env[62522]: INFO nova.compute.manager [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Took 28.94 seconds to build instance. 
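The "Acquiring lock ... / acquired ... waited N s / released ... held N s" entries throughout this section are emitted by oslo.concurrency's lockutils. A minimal sketch of the two forms visible here, with placeholder lock names and empty bodies standing in for the real compute-manager code:

    from oslo_concurrency import lockutils

    # Decorator form: serializes concurrent calls sharing the same lock name
    # and logs the acquire/wait/release lines seen in this log.
    @lockutils.synchronized('instance-uuid-placeholder')
    def _locked_do_build_and_run_instance():
        pass  # placeholder for the real build-and-run logic

    # Context-manager form, as used around the "refresh_cache-<uuid>" locks.
    with lockutils.lock('refresh_cache-placeholder'):
        pass  # placeholder for the network info cache refresh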
[ 653.122419] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Acquiring lock "678b6b5f-b410-4c55-872e-4a74da6d7ebc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.122419] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Lock "678b6b5f-b410-4c55-872e-4a74da6d7ebc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.122918] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Acquiring lock "678b6b5f-b410-4c55-872e-4a74da6d7ebc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.123256] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Lock "678b6b5f-b410-4c55-872e-4a74da6d7ebc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.123516] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Lock "678b6b5f-b410-4c55-872e-4a74da6d7ebc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 653.125736] env[62522]: INFO nova.compute.manager [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Terminating instance [ 653.142120] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Acquiring lock "8461f823-e48a-42f0-8863-44177565b82d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.142634] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Lock "8461f823-e48a-42f0-8863-44177565b82d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.224603] 
env[62522]: DEBUG nova.compute.utils [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 653.226392] env[62522]: DEBUG nova.compute.manager [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 653.226536] env[62522]: DEBUG nova.network.neutron [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 653.233727] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.289942] env[62522]: DEBUG oslo_vmware.api [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Task: {'id': task-2415014, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.68854} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.290288] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] a5657a70-5374-4d52-be9a-2d05f9556d16/a5657a70-5374-4d52-be9a-2d05f9556d16.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 653.290550] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 653.291024] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-10fb8a6c-2c78-418b-9577-3b2ba875fb27 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.303140] env[62522]: DEBUG oslo_vmware.api [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Waiting for the task: (returnval){ [ 653.303140] env[62522]: value = "task-2415015" [ 653.303140] env[62522]: _type = "Task" [ 653.303140] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.311204] env[62522]: DEBUG oslo_vmware.api [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Task: {'id': task-2415015, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.315939] env[62522]: DEBUG nova.policy [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8b8d5f0f4a5e40a7b4c7d1c7bfbb90f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '707c18dc3f934d35b85e59f08ea537ca', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 653.425392] env[62522]: INFO nova.compute.manager [-] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Took 1.58 seconds to deallocate network for instance. [ 653.505157] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Releasing lock "refresh_cache-84ad5317-344d-44c1-9318-fa1574321296" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.505484] env[62522]: DEBUG nova.compute.manager [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Instance network_info: |[{"id": "84b3fbe6-d792-4953-8bdc-9befaa3ed8f5", "address": "fa:16:3e:20:a8:fe", "network": {"id": "057a8a28-3749-4366-abe1-1a10f81258ef", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-656976695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ddb5e4a83f7040088112cdd7aa173257", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cc448a80-6318-4b6a-b0a0-85fe6cc645df", "external-id": "nsx-vlan-transportzone-91", "segmentation_id": 91, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84b3fbe6-d7", "ovs_interfaceid": "84b3fbe6-d792-4953-8bdc-9befaa3ed8f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 653.506137] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] 
Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:a8:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cc448a80-6318-4b6a-b0a0-85fe6cc645df', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '84b3fbe6-d792-4953-8bdc-9befaa3ed8f5', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 653.513631] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Creating folder: Project (ddb5e4a83f7040088112cdd7aa173257). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 653.514588] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-92fb818e-eef2-48c0-bc8b-2c76f1c80eaf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.527769] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Created folder: Project (ddb5e4a83f7040088112cdd7aa173257) in parent group-v489562. [ 653.528027] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Creating folder: Instances. Parent ref: group-v489590. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 653.528285] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b593e678-2e27-468c-8d92-8ea8067c420e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.538090] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Created folder: Instances in parent group-v489590. [ 653.538090] env[62522]: DEBUG oslo.service.loopingcall [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 653.538090] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 653.538090] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-054ce13c-9971-4458-b531-214a39d74775 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.558850] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 653.558850] env[62522]: value = "task-2415018" [ 653.558850] env[62522]: _type = "Task" [ 653.558850] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.570935] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415018, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.587120] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3e2f7fc-05e5-4265-b312-5c83a7d56794 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Lock "9a098809-cc26-4210-b09e-b7825c406294" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.454s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 653.629486] env[62522]: DEBUG nova.compute.manager [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 653.629688] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 653.630574] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21dc5122-6245-4b16-9a67-97cb321c239b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.642579] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 653.644252] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-983e6f37-ec27-4f29-bb4e-b90c587e89ea {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.654169] env[62522]: DEBUG oslo_vmware.api [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Waiting for the task: (returnval){ [ 653.654169] env[62522]: value = "task-2415019" [ 653.654169] env[62522]: _type = "Task" [ 653.654169] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.667723] env[62522]: DEBUG oslo_vmware.api [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Task: {'id': task-2415019, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.690419] env[62522]: DEBUG nova.network.neutron [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Successfully created port: e05da487-a40f-44d2-a390-d0795275ff10 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 653.695321] env[62522]: DEBUG nova.compute.manager [req-391e6ed6-e979-4ef7-8cd7-32b98e6b5a21 req-ec8932ca-6d81-459c-9ab5-87f77792a703 service nova] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Received event network-vif-plugged-84b3fbe6-d792-4953-8bdc-9befaa3ed8f5 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 653.695519] env[62522]: DEBUG oslo_concurrency.lockutils [req-391e6ed6-e979-4ef7-8cd7-32b98e6b5a21 req-ec8932ca-6d81-459c-9ab5-87f77792a703 service nova] Acquiring lock "84ad5317-344d-44c1-9318-fa1574321296-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.695719] env[62522]: DEBUG oslo_concurrency.lockutils [req-391e6ed6-e979-4ef7-8cd7-32b98e6b5a21 req-ec8932ca-6d81-459c-9ab5-87f77792a703 service nova] Lock "84ad5317-344d-44c1-9318-fa1574321296-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.695919] env[62522]: DEBUG oslo_concurrency.lockutils [req-391e6ed6-e979-4ef7-8cd7-32b98e6b5a21 req-ec8932ca-6d81-459c-9ab5-87f77792a703 service nova] Lock "84ad5317-344d-44c1-9318-fa1574321296-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 653.697649] env[62522]: DEBUG nova.compute.manager [req-391e6ed6-e979-4ef7-8cd7-32b98e6b5a21 req-ec8932ca-6d81-459c-9ab5-87f77792a703 service nova] [instance: 84ad5317-344d-44c1-9318-fa1574321296] No waiting events found dispatching network-vif-plugged-84b3fbe6-d792-4953-8bdc-9befaa3ed8f5 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 653.697885] env[62522]: WARNING nova.compute.manager [req-391e6ed6-e979-4ef7-8cd7-32b98e6b5a21 req-ec8932ca-6d81-459c-9ab5-87f77792a703 service nova] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Received unexpected event network-vif-plugged-84b3fbe6-d792-4953-8bdc-9befaa3ed8f5 for instance with vm_state building and task_state spawning. [ 653.698176] env[62522]: DEBUG nova.compute.manager [req-391e6ed6-e979-4ef7-8cd7-32b98e6b5a21 req-ec8932ca-6d81-459c-9ab5-87f77792a703 service nova] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Received event network-changed-84b3fbe6-d792-4953-8bdc-9befaa3ed8f5 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 653.698396] env[62522]: DEBUG nova.compute.manager [req-391e6ed6-e979-4ef7-8cd7-32b98e6b5a21 req-ec8932ca-6d81-459c-9ab5-87f77792a703 service nova] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Refreshing instance network info cache due to event network-changed-84b3fbe6-d792-4953-8bdc-9befaa3ed8f5. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 653.698633] env[62522]: DEBUG oslo_concurrency.lockutils [req-391e6ed6-e979-4ef7-8cd7-32b98e6b5a21 req-ec8932ca-6d81-459c-9ab5-87f77792a703 service nova] Acquiring lock "refresh_cache-84ad5317-344d-44c1-9318-fa1574321296" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.698806] env[62522]: DEBUG oslo_concurrency.lockutils [req-391e6ed6-e979-4ef7-8cd7-32b98e6b5a21 req-ec8932ca-6d81-459c-9ab5-87f77792a703 service nova] Acquired lock "refresh_cache-84ad5317-344d-44c1-9318-fa1574321296" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.699060] env[62522]: DEBUG nova.network.neutron [req-391e6ed6-e979-4ef7-8cd7-32b98e6b5a21 req-ec8932ca-6d81-459c-9ab5-87f77792a703 service nova] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Refreshing network info cache for port 84b3fbe6-d792-4953-8bdc-9befaa3ed8f5 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 653.730477] env[62522]: DEBUG nova.compute.manager [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 653.734783] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92dff4ab-d31c-4928-b0fd-378c7cc1809e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.743784] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89132074-939d-4421-968c-35e44e80193b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.785780] env[62522]: DEBUG nova.compute.manager [req-e3f46f1b-e900-4638-959a-e048fc70fede req-0fa9f300-2c24-4b89-aaef-4d0adbe2ea54 service nova] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Received event network-vif-deleted-fae6b6fe-00ac-409a-be5f-719500f98702 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 653.786862] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6352bbea-a34c-475a-be41-e61598b8bd8b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.796557] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f47a5b-c567-4fb3-9c80-1512a4141a51 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.814963] env[62522]: DEBUG nova.compute.provider_tree [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 653.821707] env[62522]: DEBUG oslo_vmware.api [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Task: {'id': task-2415015, 
'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070034} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.821973] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 653.822844] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a43acafd-179a-425a-a26a-fe4b63f12d4a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.846874] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Reconfiguring VM instance instance-00000009 to attach disk [datastore2] a5657a70-5374-4d52-be9a-2d05f9556d16/a5657a70-5374-4d52-be9a-2d05f9556d16.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 653.847643] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-871dd913-6646-422a-8c3e-9e44a115a02c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.868955] env[62522]: DEBUG oslo_vmware.api [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Waiting for the task: (returnval){ [ 653.868955] env[62522]: value = "task-2415020" [ 653.868955] env[62522]: _type = "Task" [ 653.868955] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.879107] env[62522]: DEBUG oslo_vmware.api [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Task: {'id': task-2415020, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.937116] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.069770] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415018, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.092554] env[62522]: DEBUG nova.compute.manager [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 654.164451] env[62522]: DEBUG oslo_vmware.api [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Task: {'id': task-2415019, 'name': PowerOffVM_Task, 'duration_secs': 0.216405} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.164723] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 654.164893] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 654.165151] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-815b8eeb-3e4c-49ea-8ec6-a801bbdb2d33 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.246161] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 654.246161] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 654.246161] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Deleting the datastore file [datastore1] 678b6b5f-b410-4c55-872e-4a74da6d7ebc {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 654.246161] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e97311f-3b21-4774-9bbd-ba2f1f8dac33 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.250851] env[62522]: DEBUG oslo_vmware.api [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Waiting for the task: (returnval){ [ 654.250851] env[62522]: value = "task-2415022" [ 654.250851] env[62522]: _type = "Task" [ 654.250851] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.258542] env[62522]: DEBUG oslo_vmware.api [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Task: {'id': task-2415022, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.319276] env[62522]: DEBUG nova.scheduler.client.report [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 654.378792] env[62522]: DEBUG oslo_vmware.api [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Task: {'id': task-2415020, 'name': ReconfigVM_Task, 'duration_secs': 0.32162} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.379055] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Reconfigured VM instance instance-00000009 to attach disk [datastore2] a5657a70-5374-4d52-be9a-2d05f9556d16/a5657a70-5374-4d52-be9a-2d05f9556d16.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 654.379622] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7f11c442-7f66-4c4e-9919-891fa1486aed {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.386430] env[62522]: DEBUG oslo_vmware.api [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Waiting for the task: (returnval){ [ 654.386430] env[62522]: value = "task-2415023" [ 654.386430] env[62522]: _type = "Task" [ 654.386430] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.397773] env[62522]: DEBUG oslo_vmware.api [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Task: {'id': task-2415023, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.571947] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415018, 'name': CreateVM_Task, 'duration_secs': 0.517139} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.575184] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 654.576033] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.578967] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.578967] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 654.578967] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b729de1a-bd17-4800-9177-aa850630f9c0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.581867] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Waiting for the task: (returnval){ [ 654.581867] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5201ce9b-4104-b950-11dd-00b41af3365a" [ 654.581867] env[62522]: _type = "Task" [ 654.581867] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.592847] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5201ce9b-4104-b950-11dd-00b41af3365a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.621031] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.750347] env[62522]: DEBUG nova.compute.manager [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 654.767477] env[62522]: DEBUG oslo_vmware.api [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Task: {'id': task-2415022, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.259493} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.767477] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 654.767477] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 654.767477] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 654.767477] env[62522]: INFO nova.compute.manager [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Took 1.14 seconds to destroy the instance on the hypervisor. [ 654.767649] env[62522]: DEBUG oslo.service.loopingcall [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 654.767649] env[62522]: DEBUG nova.compute.manager [-] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 654.767649] env[62522]: DEBUG nova.network.neutron [-] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 654.772270] env[62522]: DEBUG nova.network.neutron [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Successfully updated port: fd0b859a-1918-4692-a81c-b2b0e41951a5 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 654.786610] env[62522]: DEBUG nova.virt.hardware [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 654.787251] env[62522]: DEBUG nova.virt.hardware [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 654.787251] env[62522]: DEBUG nova.virt.hardware [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 654.788128] env[62522]: DEBUG nova.virt.hardware [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 654.789124] env[62522]: DEBUG nova.virt.hardware [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 654.789124] env[62522]: DEBUG nova.virt.hardware [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 654.789124] env[62522]: DEBUG nova.virt.hardware [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 654.789124] env[62522]: DEBUG nova.virt.hardware [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 654.789399] env[62522]: DEBUG nova.virt.hardware [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 654.789497] env[62522]: DEBUG nova.virt.hardware [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 654.789610] env[62522]: DEBUG nova.virt.hardware [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 654.790564] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6411d05a-4ecc-4c73-8468-4cd59aa95549 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.800556] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-543bce33-ab94-41f2-a963-6cd78501f6bd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.825442] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.103s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.828525] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.222s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.830141] env[62522]: INFO nova.compute.claims [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 
654.854923] env[62522]: INFO nova.scheduler.client.report [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] Deleted allocations for instance a3830103-2dcb-40ac-8e62-b331fe4673ff [ 654.898461] env[62522]: DEBUG oslo_vmware.api [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Task: {'id': task-2415023, 'name': Rename_Task, 'duration_secs': 0.14205} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.900923] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 654.900923] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b3269c86-9834-4d0a-9b26-6aa7b3a4037f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.910562] env[62522]: DEBUG oslo_vmware.api [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Waiting for the task: (returnval){ [ 654.910562] env[62522]: value = "task-2415024" [ 654.910562] env[62522]: _type = "Task" [ 654.910562] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.921410] env[62522]: DEBUG oslo_vmware.api [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Task: {'id': task-2415024, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.987668] env[62522]: DEBUG nova.network.neutron [req-391e6ed6-e979-4ef7-8cd7-32b98e6b5a21 req-ec8932ca-6d81-459c-9ab5-87f77792a703 service nova] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Updated VIF entry in instance network info cache for port 84b3fbe6-d792-4953-8bdc-9befaa3ed8f5. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 654.988236] env[62522]: DEBUG nova.network.neutron [req-391e6ed6-e979-4ef7-8cd7-32b98e6b5a21 req-ec8932ca-6d81-459c-9ab5-87f77792a703 service nova] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Updating instance_info_cache with network_info: [{"id": "84b3fbe6-d792-4953-8bdc-9befaa3ed8f5", "address": "fa:16:3e:20:a8:fe", "network": {"id": "057a8a28-3749-4366-abe1-1a10f81258ef", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-656976695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ddb5e4a83f7040088112cdd7aa173257", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cc448a80-6318-4b6a-b0a0-85fe6cc645df", "external-id": "nsx-vlan-transportzone-91", "segmentation_id": 91, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84b3fbe6-d7", "ovs_interfaceid": "84b3fbe6-d792-4953-8bdc-9befaa3ed8f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.997441] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Acquiring lock "ae3e55b8-00c1-4dae-9276-f46a1e17b80e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.997777] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Lock "ae3e55b8-00c1-4dae-9276-f46a1e17b80e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.094098] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5201ce9b-4104-b950-11dd-00b41af3365a, 'name': SearchDatastore_Task, 'duration_secs': 0.02081} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.094485] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 655.094727] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 655.094959] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 655.095123] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.095314] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 655.095584] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90a84db2-aa8f-4894-9c69-9bda80ff5c93 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.105247] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 655.105426] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 655.106211] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61d615ea-3159-4f01-98ac-09702dbcc1fa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.114241] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Waiting for the task: (returnval){ [ 655.114241] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c80190-0626-5ae1-bbe2-46e2e8cc8ed8" [ 655.114241] env[62522]: _type = "Task" [ 655.114241] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.125070] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c80190-0626-5ae1-bbe2-46e2e8cc8ed8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.280195] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Acquiring lock "refresh_cache-bf2ccaeb-610a-437b-be94-d3caefbe15c5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 655.280195] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Acquired lock "refresh_cache-bf2ccaeb-610a-437b-be94-d3caefbe15c5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.280195] env[62522]: DEBUG nova.network.neutron [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 655.312255] env[62522]: INFO nova.compute.manager [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Rebuilding instance [ 655.364075] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cec14d4-b24e-48d9-93d0-1fc2e769bf1e tempest-DeleteServersAdminTestJSON-1761727981 tempest-DeleteServersAdminTestJSON-1761727981-project-admin] Lock "a3830103-2dcb-40ac-8e62-b331fe4673ff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.211s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.375213] env[62522]: DEBUG nova.compute.manager [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Checking state {{(pid=62522) _get_power_state 
/opt/stack/nova/nova/compute/manager.py:1798}} [ 655.376194] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e0f3b70-d2bb-49e0-80fc-3efa598af811 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.420581] env[62522]: DEBUG oslo_vmware.api [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Task: {'id': task-2415024, 'name': PowerOnVM_Task, 'duration_secs': 0.508318} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.421585] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 655.421789] env[62522]: INFO nova.compute.manager [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Took 10.78 seconds to spawn the instance on the hypervisor. [ 655.422014] env[62522]: DEBUG nova.compute.manager [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 655.422808] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b32c29f-d1f7-43f2-8d19-b6c2d86e686e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.492819] env[62522]: DEBUG oslo_concurrency.lockutils [req-391e6ed6-e979-4ef7-8cd7-32b98e6b5a21 req-ec8932ca-6d81-459c-9ab5-87f77792a703 service nova] Releasing lock "refresh_cache-84ad5317-344d-44c1-9318-fa1574321296" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 655.625064] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c80190-0626-5ae1-bbe2-46e2e8cc8ed8, 'name': SearchDatastore_Task, 'duration_secs': 0.009702} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.625863] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73932474-382b-4b88-b1ac-d89f2473069b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.631513] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Waiting for the task: (returnval){ [ 655.631513] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5265021f-8d51-cb5f-2427-312c8585c0c4" [ 655.631513] env[62522]: _type = "Task" [ 655.631513] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.639320] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5265021f-8d51-cb5f-2427-312c8585c0c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.749776] env[62522]: DEBUG nova.network.neutron [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Successfully updated port: e05da487-a40f-44d2-a390-d0795275ff10 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 655.839132] env[62522]: DEBUG nova.network.neutron [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 655.944559] env[62522]: INFO nova.compute.manager [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Took 31.84 seconds to build instance. [ 656.001308] env[62522]: DEBUG nova.network.neutron [-] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.091908] env[62522]: DEBUG nova.network.neutron [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Updating instance_info_cache with network_info: [{"id": "fd0b859a-1918-4692-a81c-b2b0e41951a5", "address": "fa:16:3e:fc:a5:bb", "network": {"id": "bfcd05f5-bbfb-49bd-9cf7-5fca1d018b65", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-147366971-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83e5b02095fc42ea9b8a2fb7c4900fe3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd0b859a-19", "ovs_interfaceid": "fd0b859a-1918-4692-a81c-b2b0e41951a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.152537] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 
tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5265021f-8d51-cb5f-2427-312c8585c0c4, 'name': SearchDatastore_Task, 'duration_secs': 0.032011} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.155545] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 656.155906] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 84ad5317-344d-44c1-9318-fa1574321296/84ad5317-344d-44c1-9318-fa1574321296.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 656.156507] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3fe90c16-9c72-480d-9f4f-2a211bcbaa47 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.164117] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Waiting for the task: (returnval){ [ 656.164117] env[62522]: value = "task-2415025" [ 656.164117] env[62522]: _type = "Task" [ 656.164117] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.177269] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Task: {'id': task-2415025, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.255345] env[62522]: DEBUG oslo_concurrency.lockutils [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Acquiring lock "refresh_cache-a804f755-58b2-4350-8726-4e82f60afcdc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 656.255345] env[62522]: DEBUG oslo_concurrency.lockutils [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Acquired lock "refresh_cache-a804f755-58b2-4350-8726-4e82f60afcdc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.255345] env[62522]: DEBUG nova.network.neutron [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 656.296134] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d372cda1-97dc-487f-83a2-1f5a05c1e8ab {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.305031] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3842369-86b7-4a57-8480-fbdced4dcadb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.337843] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-523d740d-b056-45de-b520-7ba460713648 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.346260] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f9e07a-ac84-45e9-809f-3e27c2c32849 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.361549] env[62522]: DEBUG nova.compute.provider_tree [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 656.392050] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 656.392373] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9e9bc3b1-a12a-483e-8a91-e5c2ed8394e5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.400933] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Waiting for the task: (returnval){ [ 656.400933] 
env[62522]: value = "task-2415026" [ 656.400933] env[62522]: _type = "Task" [ 656.400933] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.410388] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415026, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.447802] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ba7ee8b-a939-4512-874d-bd36f9c03184 tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Lock "a5657a70-5374-4d52-be9a-2d05f9556d16" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.353s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 656.504213] env[62522]: INFO nova.compute.manager [-] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Took 1.74 seconds to deallocate network for instance. [ 656.598226] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Releasing lock "refresh_cache-bf2ccaeb-610a-437b-be94-d3caefbe15c5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 656.598226] env[62522]: DEBUG nova.compute.manager [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Instance network_info: |[{"id": "fd0b859a-1918-4692-a81c-b2b0e41951a5", "address": "fa:16:3e:fc:a5:bb", "network": {"id": "bfcd05f5-bbfb-49bd-9cf7-5fca1d018b65", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-147366971-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83e5b02095fc42ea9b8a2fb7c4900fe3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd0b859a-19", "ovs_interfaceid": "fd0b859a-1918-4692-a81c-b2b0e41951a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 656.598361] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:a5:bb', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': 'e99c063c-0cb7-4db6-b077-114166cfe889', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fd0b859a-1918-4692-a81c-b2b0e41951a5', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 656.608142] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Creating folder: Project (83e5b02095fc42ea9b8a2fb7c4900fe3). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 656.608697] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1ac55ecd-3276-42a5-b8f4-39e7c7c4c821 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.620051] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Created folder: Project (83e5b02095fc42ea9b8a2fb7c4900fe3) in parent group-v489562. [ 656.620622] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Creating folder: Instances. Parent ref: group-v489593. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 656.621138] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-af5cca49-db5d-4ee6-a101-d3575f869750 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.632672] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Created folder: Instances in parent group-v489593. [ 656.632971] env[62522]: DEBUG oslo.service.loopingcall [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 656.633110] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 656.633326] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3dc7c077-2a0c-4a5e-b026-b02ab268533e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.656488] env[62522]: DEBUG nova.compute.manager [req-9ba3e1cf-dbb9-4ca3-9751-f79ad67bd0a6 req-cc48d429-8d4d-4a55-b65c-74b61c4ddc4b service nova] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Received event network-vif-plugged-fd0b859a-1918-4692-a81c-b2b0e41951a5 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 656.656779] env[62522]: DEBUG oslo_concurrency.lockutils [req-9ba3e1cf-dbb9-4ca3-9751-f79ad67bd0a6 req-cc48d429-8d4d-4a55-b65c-74b61c4ddc4b service nova] Acquiring lock "bf2ccaeb-610a-437b-be94-d3caefbe15c5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.657054] env[62522]: DEBUG oslo_concurrency.lockutils [req-9ba3e1cf-dbb9-4ca3-9751-f79ad67bd0a6 req-cc48d429-8d4d-4a55-b65c-74b61c4ddc4b service nova] Lock "bf2ccaeb-610a-437b-be94-d3caefbe15c5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 656.657280] env[62522]: DEBUG oslo_concurrency.lockutils [req-9ba3e1cf-dbb9-4ca3-9751-f79ad67bd0a6 req-cc48d429-8d4d-4a55-b65c-74b61c4ddc4b service nova] Lock "bf2ccaeb-610a-437b-be94-d3caefbe15c5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 656.657628] env[62522]: DEBUG nova.compute.manager [req-9ba3e1cf-dbb9-4ca3-9751-f79ad67bd0a6 req-cc48d429-8d4d-4a55-b65c-74b61c4ddc4b service nova] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] No waiting events found dispatching network-vif-plugged-fd0b859a-1918-4692-a81c-b2b0e41951a5 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 656.657767] env[62522]: WARNING nova.compute.manager [req-9ba3e1cf-dbb9-4ca3-9751-f79ad67bd0a6 req-cc48d429-8d4d-4a55-b65c-74b61c4ddc4b service nova] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Received unexpected event network-vif-plugged-fd0b859a-1918-4692-a81c-b2b0e41951a5 for instance with vm_state building and task_state spawning. [ 656.657894] env[62522]: DEBUG nova.compute.manager [req-9ba3e1cf-dbb9-4ca3-9751-f79ad67bd0a6 req-cc48d429-8d4d-4a55-b65c-74b61c4ddc4b service nova] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Received event network-changed-fd0b859a-1918-4692-a81c-b2b0e41951a5 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 656.658114] env[62522]: DEBUG nova.compute.manager [req-9ba3e1cf-dbb9-4ca3-9751-f79ad67bd0a6 req-cc48d429-8d4d-4a55-b65c-74b61c4ddc4b service nova] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Refreshing instance network info cache due to event network-changed-fd0b859a-1918-4692-a81c-b2b0e41951a5. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 656.658880] env[62522]: DEBUG oslo_concurrency.lockutils [req-9ba3e1cf-dbb9-4ca3-9751-f79ad67bd0a6 req-cc48d429-8d4d-4a55-b65c-74b61c4ddc4b service nova] Acquiring lock "refresh_cache-bf2ccaeb-610a-437b-be94-d3caefbe15c5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 656.659211] env[62522]: DEBUG oslo_concurrency.lockutils [req-9ba3e1cf-dbb9-4ca3-9751-f79ad67bd0a6 req-cc48d429-8d4d-4a55-b65c-74b61c4ddc4b service nova] Acquired lock "refresh_cache-bf2ccaeb-610a-437b-be94-d3caefbe15c5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.659443] env[62522]: DEBUG nova.network.neutron [req-9ba3e1cf-dbb9-4ca3-9751-f79ad67bd0a6 req-cc48d429-8d4d-4a55-b65c-74b61c4ddc4b service nova] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Refreshing network info cache for port fd0b859a-1918-4692-a81c-b2b0e41951a5 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 656.665474] env[62522]: DEBUG nova.compute.manager [req-45e4dcb8-8c05-45c9-a3af-2e6ddda89b26 req-27d4438b-3982-4eed-aeb1-0c72cb8cf5c0 service nova] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Received event network-vif-plugged-e05da487-a40f-44d2-a390-d0795275ff10 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 656.665474] env[62522]: DEBUG oslo_concurrency.lockutils [req-45e4dcb8-8c05-45c9-a3af-2e6ddda89b26 req-27d4438b-3982-4eed-aeb1-0c72cb8cf5c0 service nova] Acquiring lock "a804f755-58b2-4350-8726-4e82f60afcdc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.665474] env[62522]: DEBUG oslo_concurrency.lockutils [req-45e4dcb8-8c05-45c9-a3af-2e6ddda89b26 req-27d4438b-3982-4eed-aeb1-0c72cb8cf5c0 service nova] Lock "a804f755-58b2-4350-8726-4e82f60afcdc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 656.665474] env[62522]: DEBUG oslo_concurrency.lockutils [req-45e4dcb8-8c05-45c9-a3af-2e6ddda89b26 req-27d4438b-3982-4eed-aeb1-0c72cb8cf5c0 service nova] Lock "a804f755-58b2-4350-8726-4e82f60afcdc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 656.665474] env[62522]: DEBUG nova.compute.manager [req-45e4dcb8-8c05-45c9-a3af-2e6ddda89b26 req-27d4438b-3982-4eed-aeb1-0c72cb8cf5c0 service nova] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] No waiting events found dispatching network-vif-plugged-e05da487-a40f-44d2-a390-d0795275ff10 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 656.665736] env[62522]: WARNING nova.compute.manager [req-45e4dcb8-8c05-45c9-a3af-2e6ddda89b26 req-27d4438b-3982-4eed-aeb1-0c72cb8cf5c0 service nova] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Received unexpected event network-vif-plugged-e05da487-a40f-44d2-a390-d0795275ff10 for instance with vm_state building and task_state spawning. 
[ 656.665736] env[62522]: DEBUG nova.compute.manager [req-45e4dcb8-8c05-45c9-a3af-2e6ddda89b26 req-27d4438b-3982-4eed-aeb1-0c72cb8cf5c0 service nova] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Received event network-changed-e05da487-a40f-44d2-a390-d0795275ff10 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 656.665736] env[62522]: DEBUG nova.compute.manager [req-45e4dcb8-8c05-45c9-a3af-2e6ddda89b26 req-27d4438b-3982-4eed-aeb1-0c72cb8cf5c0 service nova] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Refreshing instance network info cache due to event network-changed-e05da487-a40f-44d2-a390-d0795275ff10. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 656.665822] env[62522]: DEBUG oslo_concurrency.lockutils [req-45e4dcb8-8c05-45c9-a3af-2e6ddda89b26 req-27d4438b-3982-4eed-aeb1-0c72cb8cf5c0 service nova] Acquiring lock "refresh_cache-a804f755-58b2-4350-8726-4e82f60afcdc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 656.671652] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 656.671652] env[62522]: value = "task-2415029" [ 656.671652] env[62522]: _type = "Task" [ 656.671652] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.679919] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Task: {'id': task-2415025, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.686498] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415029, 'name': CreateVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.813103] env[62522]: DEBUG nova.network.neutron [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 656.866133] env[62522]: DEBUG nova.scheduler.client.report [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 656.920425] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415026, 'name': PowerOffVM_Task, 'duration_secs': 0.213871} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.920863] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 656.921292] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 656.922341] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b4560ec-ecb3-4fd6-9421-99c5c698223d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.930602] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 656.930916] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9e27f7d9-c9e7-41b4-9e80-796da576bb63 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.953296] env[62522]: DEBUG nova.compute.manager [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 656.962315] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 656.962918] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 656.962918] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Deleting the datastore file [datastore2] 9a098809-cc26-4210-b09e-b7825c406294 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 656.963743] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1eadb0a-af5c-4dda-ab18-4ed1a6dd25db {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.970063] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Waiting for the task: (returnval){ [ 656.970063] env[62522]: value = "task-2415031" [ 656.970063] env[62522]: _type = "Task" [ 656.970063] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.986334] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415031, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.012118] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.104843] env[62522]: DEBUG nova.network.neutron [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Updating instance_info_cache with network_info: [{"id": "e05da487-a40f-44d2-a390-d0795275ff10", "address": "fa:16:3e:6a:93:32", "network": {"id": "0c30d1a3-4c52-426c-a107-95453e519e90", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-991475332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "707c18dc3f934d35b85e59f08ea537ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape05da487-a4", "ovs_interfaceid": "e05da487-a40f-44d2-a390-d0795275ff10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.183470] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Task: {'id': task-2415025, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.597269} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.184219] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 84ad5317-344d-44c1-9318-fa1574321296/84ad5317-344d-44c1-9318-fa1574321296.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 657.184495] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 657.184763] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d9c0a45b-9a0e-479a-bc35-81d2c932ed86 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.197541] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415029, 'name': CreateVM_Task, 'duration_secs': 0.379254} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.198330] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 657.199454] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.199849] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.199912] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 657.200283] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7b6f3b3-64fa-4ced-9978-a906e01a8f1e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.203789] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Waiting for the task: (returnval){ [ 657.203789] env[62522]: value = "task-2415032" [ 
657.203789] env[62522]: _type = "Task" [ 657.203789] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.210274] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Waiting for the task: (returnval){ [ 657.210274] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ceff2a-651b-3f94-954b-1b7b24253d20" [ 657.210274] env[62522]: _type = "Task" [ 657.210274] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.220382] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Task: {'id': task-2415032, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.232527] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ceff2a-651b-3f94-954b-1b7b24253d20, 'name': SearchDatastore_Task, 'duration_secs': 0.010716} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.232527] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 657.233621] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 657.233621] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.233621] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.233621] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Creating 
directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 657.233621] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-78fff588-0c0b-4b63-8570-3883ae529c69 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.244233] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 657.244416] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 657.245232] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e11bfde-54d3-4a5d-8994-4b8cef4eeae5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.252147] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Waiting for the task: (returnval){ [ 657.252147] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5291151d-f182-8944-2bce-ef8c32a0bb9b" [ 657.252147] env[62522]: _type = "Task" [ 657.252147] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.262555] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5291151d-f182-8944-2bce-ef8c32a0bb9b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.377261] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.547s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 657.377261] env[62522]: DEBUG nova.compute.manager [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 657.384222] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 12.188s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 657.487020] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415031, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.121054} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.487135] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 657.487321] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 657.487501] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 657.499259] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.523483] env[62522]: DEBUG nova.network.neutron [req-9ba3e1cf-dbb9-4ca3-9751-f79ad67bd0a6 req-cc48d429-8d4d-4a55-b65c-74b61c4ddc4b service nova] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Updated VIF entry in instance network info cache for port fd0b859a-1918-4692-a81c-b2b0e41951a5. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 657.523592] env[62522]: DEBUG nova.network.neutron [req-9ba3e1cf-dbb9-4ca3-9751-f79ad67bd0a6 req-cc48d429-8d4d-4a55-b65c-74b61c4ddc4b service nova] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Updating instance_info_cache with network_info: [{"id": "fd0b859a-1918-4692-a81c-b2b0e41951a5", "address": "fa:16:3e:fc:a5:bb", "network": {"id": "bfcd05f5-bbfb-49bd-9cf7-5fca1d018b65", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-147366971-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83e5b02095fc42ea9b8a2fb7c4900fe3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd0b859a-19", "ovs_interfaceid": "fd0b859a-1918-4692-a81c-b2b0e41951a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.552359] env[62522]: DEBUG oslo_concurrency.lockutils [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Acquiring lock "87a90c88-6e0a-4051-8978-b2f9c5a876ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.552611] env[62522]: DEBUG oslo_concurrency.lockutils [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Lock "87a90c88-6e0a-4051-8978-b2f9c5a876ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 657.607551] env[62522]: DEBUG oslo_concurrency.lockutils [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Releasing lock "refresh_cache-a804f755-58b2-4350-8726-4e82f60afcdc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 657.607903] env[62522]: DEBUG nova.compute.manager [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Instance network_info: |[{"id": "e05da487-a40f-44d2-a390-d0795275ff10", "address": "fa:16:3e:6a:93:32", "network": {"id": "0c30d1a3-4c52-426c-a107-95453e519e90", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-991475332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": 
"gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "707c18dc3f934d35b85e59f08ea537ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape05da487-a4", "ovs_interfaceid": "e05da487-a40f-44d2-a390-d0795275ff10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 657.608734] env[62522]: DEBUG oslo_concurrency.lockutils [req-45e4dcb8-8c05-45c9-a3af-2e6ddda89b26 req-27d4438b-3982-4eed-aeb1-0c72cb8cf5c0 service nova] Acquired lock "refresh_cache-a804f755-58b2-4350-8726-4e82f60afcdc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.608734] env[62522]: DEBUG nova.network.neutron [req-45e4dcb8-8c05-45c9-a3af-2e6ddda89b26 req-27d4438b-3982-4eed-aeb1-0c72cb8cf5c0 service nova] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Refreshing network info cache for port e05da487-a40f-44d2-a390-d0795275ff10 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 657.610176] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6a:93:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '46785c9c-8b22-487d-a854-b3e67c5ed1d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e05da487-a40f-44d2-a390-d0795275ff10', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 657.618230] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Creating folder: Project (707c18dc3f934d35b85e59f08ea537ca). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 657.619415] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-279e5e61-72d2-44a4-8370-ac81302f2283 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.631202] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Created folder: Project (707c18dc3f934d35b85e59f08ea537ca) in parent group-v489562. [ 657.631530] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Creating folder: Instances. Parent ref: group-v489596. 
{{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 657.631642] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-97fc05cb-39d8-47fa-bc93-3234bdf71b7b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.642049] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Created folder: Instances in parent group-v489596. [ 657.642049] env[62522]: DEBUG oslo.service.loopingcall [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 657.642049] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 657.642194] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-97d56f5a-da29-4d27-a876-b50d4bb03a7b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.665805] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 657.665805] env[62522]: value = "task-2415035" [ 657.665805] env[62522]: _type = "Task" [ 657.665805] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.677903] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415035, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.719518] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Task: {'id': task-2415032, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085801} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.719518] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 657.719921] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d140be70-c315-4822-a947-382e0dca086e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.748035] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] 84ad5317-344d-44c1-9318-fa1574321296/84ad5317-344d-44c1-9318-fa1574321296.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 657.748866] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac3d6234-3d4c-4951-b345-70eaa622d6a0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.778315] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5291151d-f182-8944-2bce-ef8c32a0bb9b, 'name': SearchDatastore_Task, 'duration_secs': 0.009926} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.780494] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Waiting for the task: (returnval){ [ 657.780494] env[62522]: value = "task-2415036" [ 657.780494] env[62522]: _type = "Task" [ 657.780494] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.780734] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac116ed6-b034-48d2-ae7e-9a57b2eaae8d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.791794] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Waiting for the task: (returnval){ [ 657.791794] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c5b83f-1ff2-a5b1-f5c7-c29ae4df002d" [ 657.791794] env[62522]: _type = "Task" [ 657.791794] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.793580] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Task: {'id': task-2415036, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.803772] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c5b83f-1ff2-a5b1-f5c7-c29ae4df002d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.899684] env[62522]: DEBUG nova.compute.utils [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 657.902710] env[62522]: DEBUG nova.compute.manager [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 657.902974] env[62522]: DEBUG nova.network.neutron [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 658.022129] env[62522]: DEBUG nova.policy [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee727c0bfc05441a8d4cccf6f8728e9c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9c4c1e4d92254cbd89569f7c135646a5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 658.030165] env[62522]: DEBUG oslo_concurrency.lockutils [req-9ba3e1cf-dbb9-4ca3-9751-f79ad67bd0a6 req-cc48d429-8d4d-4a55-b65c-74b61c4ddc4b service nova] Releasing lock "refresh_cache-bf2ccaeb-610a-437b-be94-d3caefbe15c5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 658.030165] env[62522]: DEBUG nova.compute.manager [req-9ba3e1cf-dbb9-4ca3-9751-f79ad67bd0a6 req-cc48d429-8d4d-4a55-b65c-74b61c4ddc4b service nova] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Received event network-vif-deleted-20713b6e-4b87-4065-a83b-f62812551cd5 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 658.179279] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415035, 'name': CreateVM_Task, 'duration_secs': 0.470028} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.179489] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 658.186118] env[62522]: DEBUG oslo_concurrency.lockutils [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 658.186118] env[62522]: DEBUG oslo_concurrency.lockutils [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.186118] env[62522]: DEBUG oslo_concurrency.lockutils [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 658.186118] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c64f26a-0f3c-4661-bbac-f42a2f9f99ad {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.192522] env[62522]: DEBUG oslo_vmware.api [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Waiting for the task: (returnval){ [ 658.192522] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52208522-9016-f38c-87ee-83cb4c5b3753" [ 658.192522] env[62522]: _type = "Task" [ 658.192522] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.212614] env[62522]: DEBUG oslo_vmware.api [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52208522-9016-f38c-87ee-83cb4c5b3753, 'name': SearchDatastore_Task, 'duration_secs': 0.009818} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.213369] env[62522]: DEBUG oslo_concurrency.lockutils [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 658.213595] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 658.213867] env[62522]: DEBUG oslo_concurrency.lockutils [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 658.296156] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Task: {'id': task-2415036, 'name': ReconfigVM_Task, 'duration_secs': 0.373463} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.300993] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Reconfigured VM instance instance-0000000b to attach disk [datastore2] 84ad5317-344d-44c1-9318-fa1574321296/84ad5317-344d-44c1-9318-fa1574321296.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 658.302366] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-95db9a47-0bc7-4984-b0dd-b14f04cd95e1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.316284] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Waiting for the task: (returnval){ [ 658.316284] env[62522]: value = "task-2415037" [ 658.316284] env[62522]: _type = "Task" [ 658.316284] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.316979] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c5b83f-1ff2-a5b1-f5c7-c29ae4df002d, 'name': SearchDatastore_Task, 'duration_secs': 0.02253} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.316979] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 658.317285] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] bf2ccaeb-610a-437b-be94-d3caefbe15c5/bf2ccaeb-610a-437b-be94-d3caefbe15c5.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 658.324389] env[62522]: DEBUG oslo_concurrency.lockutils [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.324389] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 658.324389] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1f7db766-8c2b-413c-b64c-1376c00585fa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.330928] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-789d069b-ae92-48bb-879f-1acb52b41d4b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.343294] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Task: {'id': task-2415037, 'name': Rename_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.345661] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 658.345833] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 658.347797] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Waiting for the task: (returnval){ [ 658.347797] env[62522]: value = "task-2415038" [ 658.347797] env[62522]: _type = "Task" [ 658.347797] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.348017] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-165900ae-eb84-42de-ad99-8d73392df3a8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.360350] env[62522]: DEBUG oslo_vmware.api [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Waiting for the task: (returnval){ [ 658.360350] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fdc376-96b6-3812-72ef-c55e60c6465a" [ 658.360350] env[62522]: _type = "Task" [ 658.360350] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.365253] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Task: {'id': task-2415038, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.376212] env[62522]: DEBUG oslo_vmware.api [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fdc376-96b6-3812-72ef-c55e60c6465a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.408229] env[62522]: DEBUG nova.compute.manager [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 658.443563] env[62522]: WARNING nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance c8779822-1694-463e-bd06-5f84d867d1bd is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 658.443713] env[62522]: WARNING nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 7828f9c8-fc02-4218-ba93-5362af807dad is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 658.443837] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 74b6ae10-a595-4139-8eda-38fe1aa298cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 658.444036] env[62522]: WARNING nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 758ed671-347a-4949-9842-2f8cdcd261ae is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 658.444219] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 3824a70e-8498-410a-904d-c7cd0de0c358 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 658.444373] env[62522]: WARNING nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 678b6b5f-b410-4c55-872e-4a74da6d7ebc is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 658.444491] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance a5657a70-5374-4d52-be9a-2d05f9556d16 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 658.444603] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 9a098809-cc26-4210-b09e-b7825c406294 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 658.444745] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 84ad5317-344d-44c1-9318-fa1574321296 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 658.444860] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance bf2ccaeb-610a-437b-be94-d3caefbe15c5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 658.444984] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance a804f755-58b2-4350-8726-4e82f60afcdc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 658.445107] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 95e4fe36-6830-4fc4-bb53-1e5643c2f95b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 658.534154] env[62522]: DEBUG nova.virt.hardware [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 658.534154] env[62522]: DEBUG nova.virt.hardware [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 658.534154] env[62522]: DEBUG nova.virt.hardware [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 658.534682] env[62522]: DEBUG nova.virt.hardware [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 658.535359] env[62522]: DEBUG nova.virt.hardware [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 658.536326] env[62522]: DEBUG nova.virt.hardware [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 658.537924] env[62522]: DEBUG nova.virt.hardware [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 658.537924] env[62522]: DEBUG nova.virt.hardware [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 658.537924] env[62522]: DEBUG nova.virt.hardware [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 658.537924] env[62522]: DEBUG nova.virt.hardware [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 658.537924] env[62522]: DEBUG nova.virt.hardware [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 658.541000] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d10a93-0f57-454b-a1e6-67a01b3495f0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.558125] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae7d66b-c6ae-4e4a-8777-fb5bed3949d5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.580713] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Instance VIF info [] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 658.588701] env[62522]: DEBUG oslo.service.loopingcall [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 658.589621] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 658.589985] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1ec4c32b-1c0c-4282-8edb-a70e8583fc47 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.613311] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 658.613311] env[62522]: value = "task-2415039" [ 658.613311] env[62522]: _type = "Task" [ 658.613311] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.623695] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415039, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.826591] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Task: {'id': task-2415037, 'name': Rename_Task, 'duration_secs': 0.150832} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.826969] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 658.827162] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-75bb30f0-315f-49ef-9114-78c01dda8cb7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.834745] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Waiting for the task: (returnval){ [ 658.834745] env[62522]: value = "task-2415040" [ 658.834745] env[62522]: _type = "Task" [ 658.834745] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.842867] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Task: {'id': task-2415040, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.861116] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Task: {'id': task-2415038, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.876095] env[62522]: DEBUG oslo_vmware.api [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fdc376-96b6-3812-72ef-c55e60c6465a, 'name': SearchDatastore_Task, 'duration_secs': 0.018449} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.878414] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25aece16-0463-462e-bc9d-aca40925b5f7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.885518] env[62522]: DEBUG oslo_vmware.api [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Waiting for the task: (returnval){ [ 658.885518] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e18352-78af-89ac-46c0-a8cc74d6298a" [ 658.885518] env[62522]: _type = "Task" [ 658.885518] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.897128] env[62522]: DEBUG oslo_vmware.api [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e18352-78af-89ac-46c0-a8cc74d6298a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.918821] env[62522]: DEBUG nova.network.neutron [req-45e4dcb8-8c05-45c9-a3af-2e6ddda89b26 req-27d4438b-3982-4eed-aeb1-0c72cb8cf5c0 service nova] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Updated VIF entry in instance network info cache for port e05da487-a40f-44d2-a390-d0795275ff10. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 658.918821] env[62522]: DEBUG nova.network.neutron [req-45e4dcb8-8c05-45c9-a3af-2e6ddda89b26 req-27d4438b-3982-4eed-aeb1-0c72cb8cf5c0 service nova] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Updating instance_info_cache with network_info: [{"id": "e05da487-a40f-44d2-a390-d0795275ff10", "address": "fa:16:3e:6a:93:32", "network": {"id": "0c30d1a3-4c52-426c-a107-95453e519e90", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-991475332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "707c18dc3f934d35b85e59f08ea537ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape05da487-a4", "ovs_interfaceid": "e05da487-a40f-44d2-a390-d0795275ff10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.953320] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 4de70165-c28f-44b7-a01a-caa0787170b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 659.023820] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Acquiring lock "a5657a70-5374-4d52-be9a-2d05f9556d16" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.024113] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Lock "a5657a70-5374-4d52-be9a-2d05f9556d16" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.024376] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Acquiring lock "a5657a70-5374-4d52-be9a-2d05f9556d16-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.024506] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Lock "a5657a70-5374-4d52-be9a-2d05f9556d16-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.024741] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Lock "a5657a70-5374-4d52-be9a-2d05f9556d16-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.028087] env[62522]: INFO nova.compute.manager [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Terminating instance [ 659.099236] env[62522]: DEBUG nova.network.neutron [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Successfully created port: fd45a2e0-42d5-4bd8-89d5-73200646889d {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 659.122911] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415039, 'name': CreateVM_Task, 'duration_secs': 0.433478} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.123269] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 659.123705] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.123859] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.124359] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 659.124625] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cc4e97b-cb84-4d77-a894-dad2b6aeb5a5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.129498] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Waiting for the task: (returnval){ [ 659.129498] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527596b5-eddf-7bb5-2e10-1361dc053724" [ 659.129498] env[62522]: _type = "Task" [ 659.129498] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.139349] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527596b5-eddf-7bb5-2e10-1361dc053724, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.347293] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Task: {'id': task-2415040, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.364873] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Task: {'id': task-2415038, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.557334} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.365268] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] bf2ccaeb-610a-437b-be94-d3caefbe15c5/bf2ccaeb-610a-437b-be94-d3caefbe15c5.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 659.367151] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 659.367306] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4ffcfbe1-bd7b-4f5e-a73e-1ed50b9a0496 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.376902] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Waiting for the task: (returnval){ [ 659.376902] env[62522]: value = "task-2415041" [ 659.376902] env[62522]: _type = "Task" [ 659.376902] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.386671] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Task: {'id': task-2415041, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.398712] env[62522]: DEBUG oslo_vmware.api [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e18352-78af-89ac-46c0-a8cc74d6298a, 'name': SearchDatastore_Task, 'duration_secs': 0.016312} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.398712] env[62522]: DEBUG oslo_concurrency.lockutils [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 659.398712] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] a804f755-58b2-4350-8726-4e82f60afcdc/a804f755-58b2-4350-8726-4e82f60afcdc.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 659.398712] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1037a8e2-de99-4f88-9fd5-bca7446545b6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.405817] env[62522]: DEBUG oslo_vmware.api [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Waiting for the task: (returnval){ [ 659.405817] env[62522]: value = "task-2415042" [ 659.405817] env[62522]: _type = "Task" [ 659.405817] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.416034] env[62522]: DEBUG oslo_vmware.api [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Task: {'id': task-2415042, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.421846] env[62522]: DEBUG nova.compute.manager [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 659.426043] env[62522]: DEBUG oslo_concurrency.lockutils [req-45e4dcb8-8c05-45c9-a3af-2e6ddda89b26 req-27d4438b-3982-4eed-aeb1-0c72cb8cf5c0 service nova] Releasing lock "refresh_cache-a804f755-58b2-4350-8726-4e82f60afcdc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 659.459117] env[62522]: DEBUG nova.virt.hardware [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 659.459117] env[62522]: DEBUG nova.virt.hardware [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 659.459117] env[62522]: DEBUG nova.virt.hardware [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 659.459117] env[62522]: DEBUG nova.virt.hardware [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 659.459347] env[62522]: DEBUG nova.virt.hardware [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 659.459347] env[62522]: DEBUG nova.virt.hardware [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 659.461950] env[62522]: DEBUG nova.virt.hardware [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 659.461950] env[62522]: DEBUG nova.virt.hardware [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 
tempest-ServersTestJSON-2066055632-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 659.461950] env[62522]: DEBUG nova.virt.hardware [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 659.461950] env[62522]: DEBUG nova.virt.hardware [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 659.461950] env[62522]: DEBUG nova.virt.hardware [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 659.465102] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 659.465102] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3a51d9-ce95-4dcd-9947-cf9f8d6781c9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.475148] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144d0873-d2d0-416d-b5de-7b03204a301b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.533088] env[62522]: DEBUG nova.compute.manager [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 659.533088] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 659.534332] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9133b564-ae8b-44da-8c9e-58cbcccd972c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.546602] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 659.546602] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fda923ab-a0ab-4400-966d-439d09890b66 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.551659] env[62522]: DEBUG oslo_vmware.api [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Waiting for the task: (returnval){ [ 659.551659] env[62522]: value = "task-2415043" [ 659.551659] env[62522]: _type = "Task" [ 659.551659] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.565244] env[62522]: DEBUG oslo_vmware.api [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Task: {'id': task-2415043, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.644104] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527596b5-eddf-7bb5-2e10-1361dc053724, 'name': SearchDatastore_Task, 'duration_secs': 0.01241} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.644104] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 659.644104] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 659.644104] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.644375] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.644375] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 659.645125] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9bf285d2-48f6-4501-b6fb-f1e2ffc98cce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.660247] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 659.660466] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 659.661231] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b18d4fe4-0bb5-4bdf-947e-367b5231549a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.670327] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Waiting for the task: (returnval){ [ 659.670327] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52803ac9-3541-fc32-9ee8-396e0e34ceaa" [ 659.670327] env[62522]: _type = "Task" [ 659.670327] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.677375] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52803ac9-3541-fc32-9ee8-396e0e34ceaa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.845401] env[62522]: DEBUG oslo_vmware.api [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Task: {'id': task-2415040, 'name': PowerOnVM_Task, 'duration_secs': 0.694483} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.845675] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 659.845892] env[62522]: INFO nova.compute.manager [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Took 10.19 seconds to spawn the instance on the hypervisor. [ 659.846097] env[62522]: DEBUG nova.compute.manager [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 659.846884] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1437cdc-6541-4685-b573-2deae1975bc7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.886184] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Task: {'id': task-2415041, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074333} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.886184] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 659.886921] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec81942-cf46-4c33-b471-b012c3c44b2a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.910766] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Reconfiguring VM instance instance-0000000c to attach disk [datastore2] bf2ccaeb-610a-437b-be94-d3caefbe15c5/bf2ccaeb-610a-437b-be94-d3caefbe15c5.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 659.911835] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77ce7f45-ad69-4d55-bc9e-39cc91618c18 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.936172] env[62522]: DEBUG oslo_vmware.api [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Task: {'id': task-2415042, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.936172] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Waiting for the task: (returnval){ [ 659.936172] env[62522]: value = "task-2415044" [ 659.936172] env[62522]: _type = "Task" [ 659.936172] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.944668] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Task: {'id': task-2415044, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.970572] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance c73686c6-4dd8-4f00-a65a-5d8574409ad1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 660.064980] env[62522]: DEBUG oslo_vmware.api [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Task: {'id': task-2415043, 'name': PowerOffVM_Task, 'duration_secs': 0.210289} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.065413] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 660.065713] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 660.066092] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f4536ac6-f633-46a8-9397-86b2d868700c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.143180] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 660.143417] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 660.143867] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Deleting the datastore file [datastore2] a5657a70-5374-4d52-be9a-2d05f9556d16 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 660.144320] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-48f6f002-20b7-42d1-8410-34e38e0d7433 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.152177] env[62522]: DEBUG oslo_vmware.api [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Waiting for the task: (returnval){ [ 660.152177] env[62522]: value = "task-2415046" [ 660.152177] env[62522]: _type = "Task" [ 660.152177] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.164022] env[62522]: DEBUG oslo_vmware.api [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Task: {'id': task-2415046, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.180514] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52803ac9-3541-fc32-9ee8-396e0e34ceaa, 'name': SearchDatastore_Task, 'duration_secs': 0.024458} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.181308] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-511c0fdb-7be7-47e2-bcb7-c066364216e8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.188491] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Waiting for the task: (returnval){ [ 660.188491] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52979450-4dee-43da-67e9-8cfe55c89c59" [ 660.188491] env[62522]: _type = "Task" [ 660.188491] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.197334] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52979450-4dee-43da-67e9-8cfe55c89c59, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.372839] env[62522]: INFO nova.compute.manager [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Took 26.04 seconds to build instance. [ 660.426912] env[62522]: DEBUG oslo_vmware.api [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Task: {'id': task-2415042, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.834478} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.432429] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] a804f755-58b2-4350-8726-4e82f60afcdc/a804f755-58b2-4350-8726-4e82f60afcdc.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 660.432689] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 660.432968] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a7ceaf81-ca4f-4433-b790-4601dbe47ca2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.441228] env[62522]: DEBUG oslo_vmware.api [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Waiting for the task: (returnval){ [ 660.441228] env[62522]: value = "task-2415047" [ 660.441228] env[62522]: _type = "Task" [ 660.441228] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.449422] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Task: {'id': task-2415044, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.455628] env[62522]: DEBUG oslo_vmware.api [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Task: {'id': task-2415047, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.474112] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 6d8b5429-113b-4280-9851-bf6614dde4a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 660.667712] env[62522]: DEBUG oslo_vmware.api [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Task: {'id': task-2415046, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197243} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.668199] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 660.668199] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 660.668418] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 660.668618] env[62522]: INFO nova.compute.manager [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Took 1.14 seconds to destroy the instance on the hypervisor. [ 660.668872] env[62522]: DEBUG oslo.service.loopingcall [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 660.669013] env[62522]: DEBUG nova.compute.manager [-] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 660.669110] env[62522]: DEBUG nova.network.neutron [-] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 660.702713] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52979450-4dee-43da-67e9-8cfe55c89c59, 'name': SearchDatastore_Task, 'duration_secs': 0.039955} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.705853] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 660.705853] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 9a098809-cc26-4210-b09e-b7825c406294/9a098809-cc26-4210-b09e-b7825c406294.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 660.705853] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-47c81a33-36d6-4fa4-a38a-66287bcfea92 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.711726] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Waiting for the task: (returnval){ [ 660.711726] env[62522]: value = "task-2415048" [ 660.711726] env[62522]: _type = "Task" [ 660.711726] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.721942] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415048, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.875874] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e02c09ee-6d8f-47c6-a7b1-bdeb500451c6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Lock "84ad5317-344d-44c1-9318-fa1574321296" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.429s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 660.950502] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Task: {'id': task-2415044, 'name': ReconfigVM_Task, 'duration_secs': 0.584133} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.951108] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Reconfigured VM instance instance-0000000c to attach disk [datastore2] bf2ccaeb-610a-437b-be94-d3caefbe15c5/bf2ccaeb-610a-437b-be94-d3caefbe15c5.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 660.951721] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-81a2c4f1-3399-4189-a149-1faeed222b1e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.956336] env[62522]: DEBUG oslo_vmware.api [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Task: {'id': task-2415047, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071562} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.956673] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 660.961554] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3fe6668-304f-4be7-8d36-654de5ed024c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.967607] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Waiting for the task: (returnval){ [ 660.967607] env[62522]: value = "task-2415049" [ 660.967607] env[62522]: _type = "Task" [ 660.967607] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.985379] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 68b4c229-0ace-486f-9a99-d3c955b7bdfb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 660.997691] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] a804f755-58b2-4350-8726-4e82f60afcdc/a804f755-58b2-4350-8726-4e82f60afcdc.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 661.002112] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f03db253-2e59-4363-9c86-27b82bb4d7d1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.021614] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Task: {'id': task-2415049, 'name': Rename_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.027801] env[62522]: DEBUG oslo_vmware.api [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Waiting for the task: (returnval){ [ 661.027801] env[62522]: value = "task-2415050" [ 661.027801] env[62522]: _type = "Task" [ 661.027801] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.037294] env[62522]: DEBUG oslo_vmware.api [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Task: {'id': task-2415050, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.225803] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415048, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.291793] env[62522]: DEBUG nova.network.neutron [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Successfully updated port: fd45a2e0-42d5-4bd8-89d5-73200646889d {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 661.383662] env[62522]: DEBUG nova.compute.manager [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 661.483472] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Task: {'id': task-2415049, 'name': Rename_Task, 'duration_secs': 0.246484} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.483472] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 661.483472] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e22b298a-95b2-470e-a0f5-a533509bcf90 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.489564] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Waiting for the task: (returnval){ [ 661.489564] env[62522]: value = "task-2415051" [ 661.489564] env[62522]: _type = "Task" [ 661.489564] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.498730] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 879354d3-7423-41e2-93f6-0d8d3a120170 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 661.500164] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Task: {'id': task-2415051, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.540647] env[62522]: DEBUG oslo_vmware.api [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Task: {'id': task-2415050, 'name': ReconfigVM_Task, 'duration_secs': 0.336795} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.540868] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Reconfigured VM instance instance-0000000d to attach disk [datastore2] a804f755-58b2-4350-8726-4e82f60afcdc/a804f755-58b2-4350-8726-4e82f60afcdc.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 661.541512] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8dca67c5-10ff-4644-a628-855f223d5c88 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.557825] env[62522]: DEBUG oslo_vmware.api [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Waiting for the task: (returnval){ [ 661.557825] env[62522]: value = "task-2415052" [ 661.557825] env[62522]: _type = "Task" [ 661.557825] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.566997] env[62522]: DEBUG oslo_vmware.api [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Task: {'id': task-2415052, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.661393] env[62522]: DEBUG nova.compute.manager [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 661.662778] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f2ae235-ee56-4851-89f1-2407b6c6e5ab {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.721700] env[62522]: DEBUG nova.compute.manager [req-ea8022c4-4f52-4c68-94da-6ce61d13799c req-0245f487-fe33-4f86-a33e-d419ff9fb28a service nova] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Received event network-vif-plugged-fd45a2e0-42d5-4bd8-89d5-73200646889d {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 661.721923] env[62522]: DEBUG oslo_concurrency.lockutils [req-ea8022c4-4f52-4c68-94da-6ce61d13799c req-0245f487-fe33-4f86-a33e-d419ff9fb28a service nova] Acquiring lock "95e4fe36-6830-4fc4-bb53-1e5643c2f95b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 661.722163] env[62522]: DEBUG oslo_concurrency.lockutils [req-ea8022c4-4f52-4c68-94da-6ce61d13799c req-0245f487-fe33-4f86-a33e-d419ff9fb28a service nova] Lock "95e4fe36-6830-4fc4-bb53-1e5643c2f95b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.724134] env[62522]: DEBUG 
oslo_concurrency.lockutils [req-ea8022c4-4f52-4c68-94da-6ce61d13799c req-0245f487-fe33-4f86-a33e-d419ff9fb28a service nova] Lock "95e4fe36-6830-4fc4-bb53-1e5643c2f95b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 661.724134] env[62522]: DEBUG nova.compute.manager [req-ea8022c4-4f52-4c68-94da-6ce61d13799c req-0245f487-fe33-4f86-a33e-d419ff9fb28a service nova] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] No waiting events found dispatching network-vif-plugged-fd45a2e0-42d5-4bd8-89d5-73200646889d {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 661.724134] env[62522]: WARNING nova.compute.manager [req-ea8022c4-4f52-4c68-94da-6ce61d13799c req-0245f487-fe33-4f86-a33e-d419ff9fb28a service nova] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Received unexpected event network-vif-plugged-fd45a2e0-42d5-4bd8-89d5-73200646889d for instance with vm_state building and task_state spawning. [ 661.729414] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415048, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.81961} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.729711] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 9a098809-cc26-4210-b09e-b7825c406294/9a098809-cc26-4210-b09e-b7825c406294.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 661.729868] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 661.730744] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-096b1323-dc68-4580-82bf-f520be0c04c9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.737216] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Waiting for the task: (returnval){ [ 661.737216] env[62522]: value = "task-2415053" [ 661.737216] env[62522]: _type = "Task" [ 661.737216] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.746249] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415053, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.795085] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Acquiring lock "refresh_cache-95e4fe36-6830-4fc4-bb53-1e5643c2f95b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 661.795227] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Acquired lock "refresh_cache-95e4fe36-6830-4fc4-bb53-1e5643c2f95b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.795505] env[62522]: DEBUG nova.network.neutron [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 661.913902] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.006204] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 19d3d54c-5ba1-420f-b012-a08add8546c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 662.006204] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Task: {'id': task-2415051, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.010228] env[62522]: DEBUG nova.network.neutron [-] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.073701] env[62522]: DEBUG oslo_vmware.api [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Task: {'id': task-2415052, 'name': Rename_Task, 'duration_secs': 0.14584} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.074070] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 662.075717] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-55c0c4f2-e571-4082-9c7b-001b24c264eb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.082479] env[62522]: DEBUG oslo_vmware.api [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Waiting for the task: (returnval){ [ 662.082479] env[62522]: value = "task-2415054" [ 662.082479] env[62522]: _type = "Task" [ 662.082479] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.091020] env[62522]: DEBUG oslo_vmware.api [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Task: {'id': task-2415054, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.179176] env[62522]: INFO nova.compute.manager [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] instance snapshotting [ 662.185050] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38c2caed-468b-4d40-a499-a227bbf2eb5d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.206460] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4881f4d9-c57e-481c-9c8e-5090efb06e53 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.252240] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415053, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074484} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.252240] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 662.252240] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b01340e-993d-4015-b96e-ff5e706a577f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.276264] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] 9a098809-cc26-4210-b09e-b7825c406294/9a098809-cc26-4210-b09e-b7825c406294.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 662.278072] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c3fde75-86ee-429d-b93d-ddbffb3509eb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.296936] env[62522]: DEBUG nova.compute.manager [req-32d78c7f-1f88-48fc-b174-b2f3578979fa req-afe429d7-c0c3-455a-a51e-90c11d8b30a2 service nova] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Received event network-vif-deleted-13658e84-5e72-4437-ab9f-9ca4363e4eff {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 662.306934] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Waiting for the task: (returnval){ [ 662.306934] env[62522]: value = "task-2415055" [ 662.306934] env[62522]: _type = "Task" [ 662.306934] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.319727] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415055, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.357798] env[62522]: DEBUG nova.network.neutron [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 662.508251] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance c181ce48-9fe2-4400-9047-f8b5a7159dd3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 662.510018] env[62522]: DEBUG oslo_vmware.api [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Task: {'id': task-2415051, 'name': PowerOnVM_Task, 'duration_secs': 0.69358} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.510348] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 662.514681] env[62522]: INFO nova.compute.manager [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Took 10.39 seconds to spawn the instance on the hypervisor. [ 662.514681] env[62522]: DEBUG nova.compute.manager [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 662.514681] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a891b5c-24aa-452f-9e40-1133275c9e81 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.517027] env[62522]: INFO nova.compute.manager [-] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Took 1.85 seconds to deallocate network for instance. [ 662.594203] env[62522]: DEBUG oslo_vmware.api [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Task: {'id': task-2415054, 'name': PowerOnVM_Task, 'duration_secs': 0.487168} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.594621] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 662.595090] env[62522]: INFO nova.compute.manager [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Took 7.85 seconds to spawn the instance on the hypervisor. 
[ 662.597923] env[62522]: DEBUG nova.compute.manager [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 662.597923] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2869b202-3cfc-4724-bbf8-1877150028e6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.621716] env[62522]: DEBUG nova.network.neutron [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Updating instance_info_cache with network_info: [{"id": "fd45a2e0-42d5-4bd8-89d5-73200646889d", "address": "fa:16:3e:02:d3:18", "network": {"id": "4cd63604-2f08-44bf-9d47-b408383e6296", "bridge": "br-int", "label": "tempest-ServersTestJSON-916427232-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c4c1e4d92254cbd89569f7c135646a5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd45a2e0-42", "ovs_interfaceid": "fd45a2e0-42d5-4bd8-89d5-73200646889d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.719767] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Creating Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 662.719767] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-beec0054-b474-49ac-aeb7-64f7b7cb038f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.731177] env[62522]: DEBUG oslo_vmware.api [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 662.731177] env[62522]: value = "task-2415056" [ 662.731177] env[62522]: _type = "Task" [ 662.731177] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.741850] env[62522]: DEBUG oslo_vmware.api [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415056, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.826804] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415055, 'name': ReconfigVM_Task, 'duration_secs': 0.356262} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.827177] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Reconfigured VM instance instance-0000000a to attach disk [datastore2] 9a098809-cc26-4210-b09e-b7825c406294/9a098809-cc26-4210-b09e-b7825c406294.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 662.828894] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d8ea5f23-5aeb-49db-9bb6-1e98d4acb8de {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.837313] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Waiting for the task: (returnval){ [ 662.837313] env[62522]: value = "task-2415057" [ 662.837313] env[62522]: _type = "Task" [ 662.837313] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.848695] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415057, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.019891] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 253a2903-2601-4f0a-8882-e7510406f9d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 663.034356] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.046018] env[62522]: INFO nova.compute.manager [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Took 26.34 seconds to build instance. [ 663.120179] env[62522]: INFO nova.compute.manager [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Took 20.37 seconds to build instance. [ 663.125664] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Releasing lock "refresh_cache-95e4fe36-6830-4fc4-bb53-1e5643c2f95b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.128055] env[62522]: DEBUG nova.compute.manager [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Instance network_info: |[{"id": "fd45a2e0-42d5-4bd8-89d5-73200646889d", "address": "fa:16:3e:02:d3:18", "network": {"id": "4cd63604-2f08-44bf-9d47-b408383e6296", "bridge": "br-int", "label": "tempest-ServersTestJSON-916427232-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c4c1e4d92254cbd89569f7c135646a5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd45a2e0-42", "ovs_interfaceid": "fd45a2e0-42d5-4bd8-89d5-73200646889d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 663.128216] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:d3:18', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f996252-e329-42bd-a897-446dfe2b81cd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fd45a2e0-42d5-4bd8-89d5-73200646889d', 'vif_model': 
'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 663.137612] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Creating folder: Project (9c4c1e4d92254cbd89569f7c135646a5). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 663.138413] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9b9314f5-b852-4b48-b4bc-137fb33f1a5e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.150673] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Created folder: Project (9c4c1e4d92254cbd89569f7c135646a5) in parent group-v489562. [ 663.150736] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Creating folder: Instances. Parent ref: group-v489600. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 663.151240] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d155461d-0e3b-47d0-abeb-b9d27b68bf97 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.162020] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Created folder: Instances in parent group-v489600. [ 663.162310] env[62522]: DEBUG oslo.service.loopingcall [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 663.162503] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 663.162706] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a528443e-d93e-498f-a2d6-1a559ae28bd2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.198338] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 663.198338] env[62522]: value = "task-2415060" [ 663.198338] env[62522]: _type = "Task" [ 663.198338] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.209666] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415060, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.249747] env[62522]: DEBUG oslo_vmware.api [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415056, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.354253] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415057, 'name': Rename_Task, 'duration_secs': 0.226985} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.354562] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 663.354810] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56bca2ee-5396-466e-8456-6c05fd2ea4aa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.362662] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Waiting for the task: (returnval){ [ 663.362662] env[62522]: value = "task-2415061" [ 663.362662] env[62522]: _type = "Task" [ 663.362662] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.378369] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415061, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.523289] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 17e1557d-e4cf-45b0-84da-4cbcffe31fb6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 663.552574] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9123d9f-92fa-4942-9757-67e67ae644a0 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Lock "bf2ccaeb-610a-437b-be94-d3caefbe15c5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.906s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 663.623526] env[62522]: DEBUG oslo_concurrency.lockutils [None req-98d93782-3f1a-4235-8eff-9f9365addbfc tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Lock "a804f755-58b2-4350-8726-4e82f60afcdc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.395s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 663.710276] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415060, 'name': CreateVM_Task, 'duration_secs': 0.338007} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.710276] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 663.710276] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.710817] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.710817] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 663.713089] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e50e17a5-beae-4e24-af74-f2239eb6ac79 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.721533] env[62522]: DEBUG oslo_vmware.api [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Waiting for the task: (returnval){ [ 663.721533] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e6b5ac-6dae-d89f-d4b1-81b30e56fe54" [ 663.721533] env[62522]: _type = "Task" [ 663.721533] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.736455] env[62522]: DEBUG oslo_vmware.api [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e6b5ac-6dae-d89f-d4b1-81b30e56fe54, 'name': SearchDatastore_Task, 'duration_secs': 0.011789} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.741694] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.742008] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 663.742467] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.742632] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.742829] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 663.743184] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6f0a18c-18d9-4db8-bf22-dd54dc1e4060 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.760576] env[62522]: DEBUG oslo_vmware.api [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415056, 'name': CreateSnapshot_Task, 'duration_secs': 0.603601} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.763238] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Created Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 663.763238] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 663.763238] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 663.764108] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9875649-57e5-4cc3-b856-a8788e090b15 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.768684] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01ca706c-d5fa-4cc8-9da3-c9ccabbeb151 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.782650] env[62522]: DEBUG oslo_vmware.api [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Waiting for the task: (returnval){ [ 663.782650] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52faab40-9e8d-8a07-cd20-2d32d8d5a15c" [ 663.782650] env[62522]: _type = "Task" [ 663.782650] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.794354] env[62522]: DEBUG oslo_vmware.api [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52faab40-9e8d-8a07-cd20-2d32d8d5a15c, 'name': SearchDatastore_Task, 'duration_secs': 0.010186} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.795774] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9dd42b84-b8b5-4edd-b240-29c254fe7713 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.804023] env[62522]: DEBUG oslo_vmware.api [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Waiting for the task: (returnval){ [ 663.804023] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]528ebf8c-a169-eb2a-46c3-303a9b42823a" [ 663.804023] env[62522]: _type = "Task" [ 663.804023] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.817498] env[62522]: DEBUG oslo_vmware.api [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]528ebf8c-a169-eb2a-46c3-303a9b42823a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.884407] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415061, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.936787] env[62522]: DEBUG oslo_concurrency.lockutils [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Acquiring lock "194c1dd8-3b0a-4c29-9779-65f1534121d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.937286] env[62522]: DEBUG oslo_concurrency.lockutils [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Lock "194c1dd8-3b0a-4c29-9779-65f1534121d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 664.029293] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance cce5f0d4-364d-4295-a27d-44ca8585f803 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 664.056834] env[62522]: DEBUG nova.compute.manager [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 664.127480] env[62522]: DEBUG nova.compute.manager [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 664.294401] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Creating linked-clone VM from snapshot {{(pid=62522) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 664.294754] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-18c31a13-f0df-4875-bcbf-bba191218486 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.304895] env[62522]: DEBUG oslo_vmware.api [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 664.304895] env[62522]: value = "task-2415062" [ 664.304895] env[62522]: _type = "Task" [ 664.304895] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.317030] env[62522]: DEBUG oslo_vmware.api [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415062, 'name': CloneVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.321615] env[62522]: DEBUG oslo_vmware.api [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]528ebf8c-a169-eb2a-46c3-303a9b42823a, 'name': SearchDatastore_Task, 'duration_secs': 0.009342} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.321908] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.322183] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 95e4fe36-6830-4fc4-bb53-1e5643c2f95b/95e4fe36-6830-4fc4-bb53-1e5643c2f95b.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 664.322673] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ce6c46f3-6b8b-49e6-b433-be75c8dda2c6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.328757] env[62522]: DEBUG oslo_vmware.api [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Waiting for the task: (returnval){ [ 664.328757] env[62522]: value = "task-2415063" [ 664.328757] env[62522]: _type = "Task" [ 664.328757] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.339034] env[62522]: DEBUG oslo_vmware.api [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Task: {'id': task-2415063, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.381204] env[62522]: DEBUG oslo_vmware.api [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415061, 'name': PowerOnVM_Task, 'duration_secs': 0.666065} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.381586] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 664.381787] env[62522]: DEBUG nova.compute.manager [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 664.382695] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4350d3a2-4571-40a6-81be-0cdad782e90f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.536581] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance cd69a052-369b-4809-baf0-a1aec44f4ab5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 664.588956] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.656144] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.826794] env[62522]: DEBUG oslo_vmware.api [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415062, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.843533] env[62522]: DEBUG oslo_vmware.api [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Task: {'id': task-2415063, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.906878] env[62522]: DEBUG nova.compute.manager [req-2a39f111-6496-4be6-8150-f9f1593ae39e req-03a1cca8-a457-446f-a93a-7735d94366b2 service nova] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Received event network-changed-fd45a2e0-42d5-4bd8-89d5-73200646889d {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 664.906878] env[62522]: DEBUG nova.compute.manager [req-2a39f111-6496-4be6-8150-f9f1593ae39e req-03a1cca8-a457-446f-a93a-7735d94366b2 service nova] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Refreshing instance network info cache due to event network-changed-fd45a2e0-42d5-4bd8-89d5-73200646889d. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 664.906878] env[62522]: DEBUG oslo_concurrency.lockutils [req-2a39f111-6496-4be6-8150-f9f1593ae39e req-03a1cca8-a457-446f-a93a-7735d94366b2 service nova] Acquiring lock "refresh_cache-95e4fe36-6830-4fc4-bb53-1e5643c2f95b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.906878] env[62522]: DEBUG oslo_concurrency.lockutils [req-2a39f111-6496-4be6-8150-f9f1593ae39e req-03a1cca8-a457-446f-a93a-7735d94366b2 service nova] Acquired lock "refresh_cache-95e4fe36-6830-4fc4-bb53-1e5643c2f95b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.906878] env[62522]: DEBUG nova.network.neutron [req-2a39f111-6496-4be6-8150-f9f1593ae39e req-03a1cca8-a457-446f-a93a-7735d94366b2 service nova] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Refreshing network info cache for port fd45a2e0-42d5-4bd8-89d5-73200646889d {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 664.909254] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.042839] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 8461f823-e48a-42f0-8863-44177565b82d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 665.322219] env[62522]: DEBUG oslo_vmware.api [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415062, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.348019] env[62522]: DEBUG oslo_vmware.api [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Task: {'id': task-2415063, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.821741} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.348019] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 95e4fe36-6830-4fc4-bb53-1e5643c2f95b/95e4fe36-6830-4fc4-bb53-1e5643c2f95b.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 665.348019] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 665.348568] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5f86a640-f2a5-4f7c-97d3-869f9b1b764b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.357736] env[62522]: DEBUG oslo_vmware.api [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Waiting for the task: (returnval){ [ 665.357736] env[62522]: value = "task-2415064" [ 665.357736] env[62522]: _type = "Task" [ 665.357736] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.370280] env[62522]: DEBUG oslo_vmware.api [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Task: {'id': task-2415064, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.544362] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance ae3e55b8-00c1-4dae-9276-f46a1e17b80e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 665.822170] env[62522]: DEBUG oslo_vmware.api [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415062, 'name': CloneVM_Task, 'duration_secs': 1.334351} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.822170] env[62522]: INFO nova.virt.vmwareapi.vmops [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Created linked-clone VM from snapshot [ 665.824030] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83960dff-bca5-47ca-a0f3-1ad99cb599de {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.836680] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Uploading image 95e15ed0-fd2e-43f9-bad7-0d851b0b7658 {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 665.859410] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Destroying the VM {{(pid=62522) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 665.859410] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-bd55f9a2-f5ae-420f-a8c6-a289025bf17e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.874139] env[62522]: DEBUG oslo_vmware.api [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Task: {'id': task-2415064, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069045} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.874616] env[62522]: DEBUG oslo_vmware.api [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 665.874616] env[62522]: value = "task-2415065" [ 665.874616] env[62522]: _type = "Task" [ 665.874616] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.875281] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 665.876373] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f509491-0459-4660-95a8-42b358c11ab1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.890606] env[62522]: DEBUG oslo_vmware.api [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415065, 'name': Destroy_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.912875] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] 95e4fe36-6830-4fc4-bb53-1e5643c2f95b/95e4fe36-6830-4fc4-bb53-1e5643c2f95b.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 665.915738] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c3633a6-80c7-4c0e-9aeb-308d2138503a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.943812] env[62522]: DEBUG oslo_vmware.api [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Waiting for the task: (returnval){ [ 665.943812] env[62522]: value = "task-2415066" [ 665.943812] env[62522]: _type = "Task" [ 665.943812] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.955516] env[62522]: DEBUG oslo_vmware.api [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Task: {'id': task-2415066, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.995733] env[62522]: DEBUG nova.network.neutron [req-2a39f111-6496-4be6-8150-f9f1593ae39e req-03a1cca8-a457-446f-a93a-7735d94366b2 service nova] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Updated VIF entry in instance network info cache for port fd45a2e0-42d5-4bd8-89d5-73200646889d. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 665.996104] env[62522]: DEBUG nova.network.neutron [req-2a39f111-6496-4be6-8150-f9f1593ae39e req-03a1cca8-a457-446f-a93a-7735d94366b2 service nova] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Updating instance_info_cache with network_info: [{"id": "fd45a2e0-42d5-4bd8-89d5-73200646889d", "address": "fa:16:3e:02:d3:18", "network": {"id": "4cd63604-2f08-44bf-9d47-b408383e6296", "bridge": "br-int", "label": "tempest-ServersTestJSON-916427232-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c4c1e4d92254cbd89569f7c135646a5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd45a2e0-42", "ovs_interfaceid": "fd45a2e0-42d5-4bd8-89d5-73200646889d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.047686] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 87a90c88-6e0a-4051-8978-b2f9c5a876ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 666.048051] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 666.048179] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2048MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 666.389012] env[62522]: DEBUG oslo_vmware.api [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415065, 'name': Destroy_Task, 'duration_secs': 0.388928} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.396217] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Destroyed the VM [ 666.396313] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Deleting Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 666.396724] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-1e0f2f1c-108f-49e4-9d0e-946c68b646e9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.403920] env[62522]: DEBUG oslo_vmware.api [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 666.403920] env[62522]: value = "task-2415067" [ 666.403920] env[62522]: _type = "Task" [ 666.403920] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.414114] env[62522]: DEBUG oslo_vmware.api [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415067, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.455903] env[62522]: DEBUG oslo_vmware.api [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Task: {'id': task-2415066, 'name': ReconfigVM_Task, 'duration_secs': 0.358591} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.455903] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Reconfigured VM instance instance-0000000e to attach disk [datastore1] 95e4fe36-6830-4fc4-bb53-1e5643c2f95b/95e4fe36-6830-4fc4-bb53-1e5643c2f95b.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 666.456482] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-164bf8aa-c5a4-4848-a3fe-e0efc8733561 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.466475] env[62522]: DEBUG oslo_vmware.api [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Waiting for the task: (returnval){ [ 666.466475] env[62522]: value = "task-2415068" [ 666.466475] env[62522]: _type = "Task" [ 666.466475] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.479770] env[62522]: DEBUG oslo_vmware.api [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Task: {'id': task-2415068, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.503429] env[62522]: DEBUG oslo_concurrency.lockutils [req-2a39f111-6496-4be6-8150-f9f1593ae39e req-03a1cca8-a457-446f-a93a-7735d94366b2 service nova] Releasing lock "refresh_cache-95e4fe36-6830-4fc4-bb53-1e5643c2f95b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 666.639492] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f1925c6-0724-4918-95c9-285a77285c39 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.651507] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-765be019-770b-4b67-b467-acf62cccc51e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.680503] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37df868-fdc2-40c6-9366-94d781417219 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.688629] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd521ad9-6f8e-4320-8907-89daf66a55ce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.707156] env[62522]: DEBUG nova.compute.provider_tree [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 666.707458] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Acquiring lock "84ad5317-344d-44c1-9318-fa1574321296" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 666.707714] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Lock "84ad5317-344d-44c1-9318-fa1574321296" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.707951] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Acquiring lock "84ad5317-344d-44c1-9318-fa1574321296-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 666.708232] env[62522]: DEBUG 
oslo_concurrency.lockutils [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Lock "84ad5317-344d-44c1-9318-fa1574321296-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.708453] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Lock "84ad5317-344d-44c1-9318-fa1574321296-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.710795] env[62522]: INFO nova.compute.manager [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Terminating instance [ 666.750765] env[62522]: INFO nova.compute.manager [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Rebuilding instance [ 666.808098] env[62522]: DEBUG nova.compute.manager [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 666.809198] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf198877-cb3e-42ec-8da6-0732c3fcb14a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.916385] env[62522]: DEBUG oslo_vmware.api [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415067, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.976715] env[62522]: DEBUG oslo_vmware.api [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Task: {'id': task-2415068, 'name': Rename_Task, 'duration_secs': 0.141397} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.976938] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 666.977174] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-319cb163-b963-4bc8-aa40-f66412cada36 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.984038] env[62522]: DEBUG oslo_vmware.api [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Waiting for the task: (returnval){ [ 666.984038] env[62522]: value = "task-2415069" [ 666.984038] env[62522]: _type = "Task" [ 666.984038] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.996763] env[62522]: DEBUG oslo_vmware.api [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Task: {'id': task-2415069, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.061907] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7937a354-61d5-4466-b8ba-ce0d7b79ece3 tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Acquiring lock "a804f755-58b2-4350-8726-4e82f60afcdc" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.062377] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7937a354-61d5-4466-b8ba-ce0d7b79ece3 tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Lock "a804f755-58b2-4350-8726-4e82f60afcdc" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 667.062461] env[62522]: INFO nova.compute.manager [None req-7937a354-61d5-4466-b8ba-ce0d7b79ece3 tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Rebooting instance [ 667.212232] env[62522]: DEBUG nova.scheduler.client.report [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 667.217947] env[62522]: DEBUG nova.compute.manager [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 
84ad5317-344d-44c1-9318-fa1574321296] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 667.218261] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 667.223604] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808814e5-ec57-400c-bdd0-ac94bad27af5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.234073] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 667.234562] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c3c21432-a044-4d0e-a2a0-714d8f622566 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.243332] env[62522]: DEBUG oslo_vmware.api [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Waiting for the task: (returnval){ [ 667.243332] env[62522]: value = "task-2415070" [ 667.243332] env[62522]: _type = "Task" [ 667.243332] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.253966] env[62522]: DEBUG oslo_vmware.api [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Task: {'id': task-2415070, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.417351] env[62522]: DEBUG oslo_vmware.api [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415067, 'name': RemoveSnapshot_Task, 'duration_secs': 0.573461} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.417719] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Deleted Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 667.495801] env[62522]: DEBUG oslo_vmware.api [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Task: {'id': task-2415069, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.605681] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7937a354-61d5-4466-b8ba-ce0d7b79ece3 tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Acquiring lock "refresh_cache-a804f755-58b2-4350-8726-4e82f60afcdc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 667.607701] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7937a354-61d5-4466-b8ba-ce0d7b79ece3 tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Acquired lock "refresh_cache-a804f755-58b2-4350-8726-4e82f60afcdc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.607701] env[62522]: DEBUG nova.network.neutron [None req-7937a354-61d5-4466-b8ba-ce0d7b79ece3 tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 667.670696] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Acquiring lock "5b69254a-b34b-48ff-a96c-d8573c9abf3b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.670857] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Lock "5b69254a-b34b-48ff-a96c-d8573c9abf3b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 667.719430] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62522) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 667.719430] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 10.336s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 667.719430] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.395s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 667.721377] env[62522]: INFO nova.compute.claims [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Claim successful 
on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 667.753499] env[62522]: DEBUG oslo_vmware.api [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Task: {'id': task-2415070, 'name': PowerOffVM_Task, 'duration_secs': 0.317819} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.753795] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 667.753875] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 667.754698] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-75b1f284-0871-4e99-a29c-0005c37b9959 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.817036] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Acquiring lock "d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.817285] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Lock "d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 667.825203] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 667.825360] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 667.825538] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Deleting the datastore file [datastore2] 84ad5317-344d-44c1-9318-fa1574321296 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 667.826210] env[62522]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 667.826253] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-611c9415-9960-404f-8855-2c7881cfe79c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.831190] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f04111b-6e4a-44d8-8114-2c448b1790ff {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.839809] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Waiting for the task: (returnval){ [ 667.839809] env[62522]: value = "task-2415072" [ 667.839809] env[62522]: _type = "Task" [ 667.839809] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.842330] env[62522]: DEBUG oslo_vmware.api [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Waiting for the task: (returnval){ [ 667.842330] env[62522]: value = "task-2415073" [ 667.842330] env[62522]: _type = "Task" [ 667.842330] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.856053] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Task: {'id': task-2415072, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.861357] env[62522]: DEBUG oslo_vmware.api [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Task: {'id': task-2415073, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.923734] env[62522]: WARNING nova.compute.manager [None req-6e2d71ba-010c-481e-9862-41ff005e9859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Image not found during snapshot: nova.exception.ImageNotFound: Image 95e15ed0-fd2e-43f9-bad7-0d851b0b7658 could not be found. [ 667.996475] env[62522]: DEBUG oslo_vmware.api [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Task: {'id': task-2415069, 'name': PowerOnVM_Task, 'duration_secs': 0.636008} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.996714] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 667.996980] env[62522]: INFO nova.compute.manager [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Took 8.57 seconds to spawn the instance on the hypervisor. [ 667.997114] env[62522]: DEBUG nova.compute.manager [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 667.999576] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5298f10-98f7-458e-806f-2afde5aebdab {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.359689] env[62522]: DEBUG nova.compute.manager [req-ca006c64-98b6-49a1-a391-ac863abd60f9 req-6f611bdb-c6f4-496f-8287-8fa219426857 service nova] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Received event network-changed-fd0b859a-1918-4692-a81c-b2b0e41951a5 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 668.360206] env[62522]: DEBUG nova.compute.manager [req-ca006c64-98b6-49a1-a391-ac863abd60f9 req-6f611bdb-c6f4-496f-8287-8fa219426857 service nova] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Refreshing instance network info cache due to event network-changed-fd0b859a-1918-4692-a81c-b2b0e41951a5. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 668.360615] env[62522]: DEBUG oslo_concurrency.lockutils [req-ca006c64-98b6-49a1-a391-ac863abd60f9 req-6f611bdb-c6f4-496f-8287-8fa219426857 service nova] Acquiring lock "refresh_cache-bf2ccaeb-610a-437b-be94-d3caefbe15c5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 668.360883] env[62522]: DEBUG oslo_concurrency.lockutils [req-ca006c64-98b6-49a1-a391-ac863abd60f9 req-6f611bdb-c6f4-496f-8287-8fa219426857 service nova] Acquired lock "refresh_cache-bf2ccaeb-610a-437b-be94-d3caefbe15c5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.361124] env[62522]: DEBUG nova.network.neutron [req-ca006c64-98b6-49a1-a391-ac863abd60f9 req-6f611bdb-c6f4-496f-8287-8fa219426857 service nova] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Refreshing network info cache for port fd0b859a-1918-4692-a81c-b2b0e41951a5 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 668.372043] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Task: {'id': task-2415072, 'name': PowerOffVM_Task, 'duration_secs': 0.201666} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.372677] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 668.372954] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 668.380355] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f8aa7bb-fe4f-48f1-916a-39a8c840c20c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.387021] env[62522]: DEBUG oslo_vmware.api [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Task: {'id': task-2415073, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.376207} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.391695] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 668.391925] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 668.392122] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 668.392317] env[62522]: INFO nova.compute.manager [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Took 1.17 seconds to destroy the instance on the hypervisor. [ 668.392576] env[62522]: DEBUG oslo.service.loopingcall [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 668.393621] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquiring lock "c1fd078c-61d4-4c0f-8c49-0f56a926a087" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 668.393817] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Lock "c1fd078c-61d4-4c0f-8c49-0f56a926a087" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 668.394050] env[62522]: DEBUG nova.compute.manager [-] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 668.394113] env[62522]: DEBUG nova.network.neutron [-] [instance: 84ad5317-344d-44c1-9318-fa1574321296] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 668.398394] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 668.399137] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bd87cd91-e2c8-435f-a0f5-7af5081c93ea {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.423565] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 668.423777] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 668.424037] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Deleting the datastore file [datastore2] 9a098809-cc26-4210-b09e-b7825c406294 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 668.424310] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c635645-a500-49be-949a-4962568b8e65 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.432074] env[62522]: DEBUG oslo_vmware.api [None 
req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Waiting for the task: (returnval){ [ 668.432074] env[62522]: value = "task-2415075" [ 668.432074] env[62522]: _type = "Task" [ 668.432074] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.439671] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Task: {'id': task-2415075, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.529197] env[62522]: INFO nova.compute.manager [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Took 23.96 seconds to build instance. [ 668.567733] env[62522]: DEBUG nova.network.neutron [None req-7937a354-61d5-4466-b8ba-ce0d7b79ece3 tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Updating instance_info_cache with network_info: [{"id": "e05da487-a40f-44d2-a390-d0795275ff10", "address": "fa:16:3e:6a:93:32", "network": {"id": "0c30d1a3-4c52-426c-a107-95453e519e90", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-991475332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "707c18dc3f934d35b85e59f08ea537ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape05da487-a4", "ovs_interfaceid": "e05da487-a40f-44d2-a390-d0795275ff10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.950781] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Task: {'id': task-2415075, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.355258} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.954799] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 668.954927] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 668.955029] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 669.030505] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0b7919c5-e293-49ac-a3aa-2c5d1bcdfa6b tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Lock "95e4fe36-6830-4fc4-bb53-1e5643c2f95b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.908s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 669.075470] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7937a354-61d5-4466-b8ba-ce0d7b79ece3 tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Releasing lock "refresh_cache-a804f755-58b2-4350-8726-4e82f60afcdc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 669.222797] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-404bdcfc-d5a5-4254-84f7-9642b549992d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.230961] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b4aff23-170f-4312-9334-16d19d9ec71a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.263769] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-243f8832-f937-4796-a788-da14656431eb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.271228] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0bf0c66-f69c-4156-8290-ff3de2131780 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.277657] env[62522]: DEBUG nova.network.neutron [req-ca006c64-98b6-49a1-a391-ac863abd60f9 req-6f611bdb-c6f4-496f-8287-8fa219426857 service nova] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Updated VIF entry in instance network info cache for port fd0b859a-1918-4692-a81c-b2b0e41951a5. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 669.278025] env[62522]: DEBUG nova.network.neutron [req-ca006c64-98b6-49a1-a391-ac863abd60f9 req-6f611bdb-c6f4-496f-8287-8fa219426857 service nova] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Updating instance_info_cache with network_info: [{"id": "fd0b859a-1918-4692-a81c-b2b0e41951a5", "address": "fa:16:3e:fc:a5:bb", "network": {"id": "bfcd05f5-bbfb-49bd-9cf7-5fca1d018b65", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-147366971-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83e5b02095fc42ea9b8a2fb7c4900fe3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd0b859a-19", "ovs_interfaceid": "fd0b859a-1918-4692-a81c-b2b0e41951a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.294211] env[62522]: DEBUG nova.compute.provider_tree [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 669.308021] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquiring lock "e813e7da-fd2c-4f10-b2f3-1e2b5c153a19" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.308021] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock "e813e7da-fd2c-4f10-b2f3-1e2b5c153a19" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.324179] env[62522]: DEBUG nova.network.neutron [-] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.443506] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquiring lock 
"74b6ae10-a595-4139-8eda-38fe1aa298cf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.443506] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "74b6ae10-a595-4139-8eda-38fe1aa298cf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.443506] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquiring lock "74b6ae10-a595-4139-8eda-38fe1aa298cf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.443506] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "74b6ae10-a595-4139-8eda-38fe1aa298cf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.444046] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "74b6ae10-a595-4139-8eda-38fe1aa298cf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 669.444712] env[62522]: INFO nova.compute.manager [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Terminating instance [ 669.532785] env[62522]: DEBUG nova.compute.manager [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 669.583068] env[62522]: DEBUG nova.compute.manager [None req-7937a354-61d5-4466-b8ba-ce0d7b79ece3 tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 669.583068] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13538834-cd9b-4ba4-97cb-1d218f082678 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.781467] env[62522]: DEBUG oslo_concurrency.lockutils [req-ca006c64-98b6-49a1-a391-ac863abd60f9 req-6f611bdb-c6f4-496f-8287-8fa219426857 service nova] Releasing lock "refresh_cache-bf2ccaeb-610a-437b-be94-d3caefbe15c5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 669.803009] env[62522]: DEBUG nova.scheduler.client.report [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 669.827561] env[62522]: INFO nova.compute.manager [-] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Took 1.43 seconds to deallocate network for instance. [ 669.948604] env[62522]: DEBUG nova.compute.manager [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 669.948744] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 669.949907] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db7b7b0-a133-46fe-b04c-b6426dc24a08 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.957886] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 669.958157] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-25c2b70b-fe31-43a4-a605-f336402f3f68 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.967487] env[62522]: DEBUG oslo_vmware.api [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 669.967487] env[62522]: value = "task-2415076" [ 669.967487] env[62522]: _type = "Task" [ 669.967487] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.976641] env[62522]: DEBUG oslo_vmware.api [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415076, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.991690] env[62522]: DEBUG nova.virt.hardware [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 669.991935] env[62522]: DEBUG nova.virt.hardware [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 669.992100] env[62522]: DEBUG nova.virt.hardware [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 669.992285] env[62522]: DEBUG nova.virt.hardware [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 669.992425] env[62522]: DEBUG nova.virt.hardware [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 669.992565] env[62522]: DEBUG nova.virt.hardware [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 669.992771] env[62522]: DEBUG nova.virt.hardware [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 669.993312] env[62522]: DEBUG nova.virt.hardware [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 669.993637] env[62522]: DEBUG nova.virt.hardware [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 
tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 669.993813] env[62522]: DEBUG nova.virt.hardware [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 669.994241] env[62522]: DEBUG nova.virt.hardware [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 669.995145] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b121ad92-23ec-4367-8f64-9700bb156c23 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.004026] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-953e4050-84e0-4f8e-907c-e7dc0f182882 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.018966] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Instance VIF info [] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 670.025015] env[62522]: DEBUG oslo.service.loopingcall [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 670.025309] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 670.025530] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8f8ccfb0-6fb7-460c-9825-509a1456bc29 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.045115] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 670.045115] env[62522]: value = "task-2415077" [ 670.045115] env[62522]: _type = "Task" [ 670.045115] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.055169] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415077, 'name': CreateVM_Task} progress is 0%. 
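The nova.virt.hardware records just above walk a 1-vCPU m1.nano flavor through topology selection: with no explicit flavor or image limits the constraints default to 65536 sockets/cores/threads, and the only candidate that survives is cores=1, sockets=1, threads=1. As a rough illustration of that narrowing step (not Nova's actual implementation; the function name and defaults here are invented for the sketch), enumerating the candidate triples for a small vCPU count looks like:

```python
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Illustrative only: enumerate (sockets, cores, threads) triples whose
    product equals the vCPU count and that fit within the given limits."""
    found = []
    for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
        if sockets * cores * threads != vcpus:
            continue
        if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
            found.append((sockets, cores, threads))
    return found

# A 1-vCPU flavor such as m1.nano admits exactly one candidate, matching the
# "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" entry above.
print(possible_topologies(1))   # [(1, 1, 1)]
```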
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.058312] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.310666] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.591s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.311748] env[62522]: DEBUG nova.compute.manager [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 670.315071] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.359s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.316632] env[62522]: INFO nova.compute.claims [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 670.340878] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.478209] env[62522]: DEBUG oslo_vmware.api [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415076, 'name': PowerOffVM_Task, 'duration_secs': 0.251372} completed successfully. 
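The PowerOffVM_Task and CreateVM_Task entries follow the same pattern each time: invoke a vSphere method, get back a Task handle, then poll it until it reports success, recording the duration. A minimal sketch of that poll loop, assuming a `get_task_info(task)` helper that returns an object with `state` and `error` attributes (the helper and its return shape are invented for illustration; in the running service oslo.vmware's wait_for_task does this against the live session):

```python
import time

class TaskFailed(Exception):
    pass

def wait_for_task(task, get_task_info, poll_interval=0.5, timeout=300):
    """Illustrative poll loop: block until a vSphere-style task finishes.

    get_task_info(task) is assumed to return an object with a `state`
    attribute ('queued', 'running', 'success', 'error') and an `error`
    attribute describing any failure, mirroring what the _poll_task entries
    above report as progress and duration.
    """
    start = time.monotonic()
    while True:
        info = get_task_info(task)
        if info.state == 'success':
            return time.monotonic() - start          # e.g. duration_secs: 0.251372
        if info.state == 'error':
            raise TaskFailed(info.error)
        if time.monotonic() - start > timeout:
            raise TaskFailed('timed out waiting for %s' % task)
        time.sleep(poll_interval)
```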
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.479352] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 670.479352] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 670.479352] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fed7972b-eb6c-4396-9f12-62d111935af7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.554684] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415077, 'name': CreateVM_Task, 'duration_secs': 0.3011} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.554870] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 670.555320] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 670.556632] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.556632] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 670.556632] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cda6616-bb97-40d5-a499-fac70e9f5050 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.561567] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Waiting for the task: (returnval){ [ 670.561567] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521558fb-2673-db97-3db4-d3fc7594a93f" [ 670.561567] env[62522]: _type = "Task" [ 670.561567] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.562887] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 670.563124] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 670.563334] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Deleting the datastore file [datastore1] 74b6ae10-a595-4139-8eda-38fe1aa298cf {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 670.566507] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-12f7751b-c573-4f7e-81d0-38ceb44b20b6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.575153] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521558fb-2673-db97-3db4-d3fc7594a93f, 'name': SearchDatastore_Task, 'duration_secs': 0.009824} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.575432] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 670.575673] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 670.575898] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 670.576106] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.576236] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 670.576551] env[62522]: DEBUG oslo_vmware.api [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 670.576551] env[62522]: value = "task-2415079" [ 670.576551] env[62522]: _type = "Task" [ 670.576551] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.576731] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0ed6cf5e-0ab1-4aaa-a921-6d0bc472d27c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.586037] env[62522]: DEBUG oslo_vmware.api [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415079, 'name': DeleteDatastoreFile_Task} progress is 0%. 
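The lock records around the image cache ("Acquiring lock ... Acquired lock ... Releasing lock" on the devstack-image-cache_base paths) and the long-held "compute_resources" lock come from oslo.concurrency's lockutils, used either as a context manager or a decorator. A small sketch of the same pattern, assuming the standard lockutils.lock / lockutils.synchronized entry points; the function bodies and their arguments are placeholders, not Nova code:

```python
from oslo_concurrency import lockutils

# Context-manager form: mirrors the paired "Acquiring lock ..." /
# "... released" entries seen above for the datastore image-cache path.
def refresh_image_cache(image_id, do_refresh):
    with lockutils.lock('devstack-image-cache_base/%s' % image_id):
        return do_refresh(image_id)

# Decorator form: every call serializes on the named lock, which is how a
# single "compute_resources" lock can be reported as held for seconds while
# a resource claim runs.
@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid, tracker):
    return tracker.instance_claim(instance_uuid)
```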
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.591767] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 670.591856] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 670.592516] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cf54c3d-6599-41d5-885e-90b56d7a8b3c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.597456] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Waiting for the task: (returnval){ [ 670.597456] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d4d055-c2d1-d9aa-bd4a-45a128ec50fb" [ 670.597456] env[62522]: _type = "Task" [ 670.597456] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.602025] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce0b7d42-d86b-4dad-bf77-8ad59ba13176 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.610068] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d4d055-c2d1-d9aa-bd4a-45a128ec50fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.613807] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7937a354-61d5-4466-b8ba-ce0d7b79ece3 tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Doing hard reboot of VM {{(pid=62522) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 670.614056] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-e96f23cb-0608-4664-96f2-c7a1024bfc6f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.619104] env[62522]: DEBUG oslo_vmware.api [None req-7937a354-61d5-4466-b8ba-ce0d7b79ece3 tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Waiting for the task: (returnval){ [ 670.619104] env[62522]: value = "task-2415080" [ 670.619104] env[62522]: _type = "Task" [ 670.619104] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.628142] env[62522]: DEBUG oslo_vmware.api [None req-7937a354-61d5-4466-b8ba-ce0d7b79ece3 tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Task: {'id': task-2415080, 'name': ResetVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.823874] env[62522]: DEBUG nova.compute.utils [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 670.828208] env[62522]: DEBUG nova.compute.manager [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 670.828334] env[62522]: DEBUG nova.network.neutron [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 670.853110] env[62522]: DEBUG nova.compute.manager [req-83067fb5-e037-451b-ba4c-f7f7b9c8aee8 req-26554fba-68b8-4697-8052-c26a2afd44d7 service nova] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Received event network-vif-deleted-84b3fbe6-d792-4953-8bdc-9befaa3ed8f5 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 670.897116] env[62522]: DEBUG nova.policy [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8c88488ce86402e96e887f1c3eba84a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f3b97af192e04ccfa14c4ee30eb6d056', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 671.090049] env[62522]: DEBUG oslo_vmware.api [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415079, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12589} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.090049] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 671.090049] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 671.091114] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 671.091114] env[62522]: INFO nova.compute.manager [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Took 1.14 seconds to destroy the instance on the hypervisor. [ 671.091379] env[62522]: DEBUG oslo.service.loopingcall [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 671.091682] env[62522]: DEBUG nova.compute.manager [-] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 671.091885] env[62522]: DEBUG nova.network.neutron [-] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 671.109406] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d4d055-c2d1-d9aa-bd4a-45a128ec50fb, 'name': SearchDatastore_Task, 'duration_secs': 0.008257} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.109406] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0074a1c6-2c5a-4612-b13f-3918fa3f9fb4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.113416] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Waiting for the task: (returnval){ [ 671.113416] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5267c620-c4a9-5f9a-7936-a5f52728ac64" [ 671.113416] env[62522]: _type = "Task" [ 671.113416] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.126186] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5267c620-c4a9-5f9a-7936-a5f52728ac64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.131518] env[62522]: DEBUG oslo_vmware.api [None req-7937a354-61d5-4466-b8ba-ce0d7b79ece3 tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Task: {'id': task-2415080, 'name': ResetVM_Task, 'duration_secs': 0.108861} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.131518] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7937a354-61d5-4466-b8ba-ce0d7b79ece3 tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Did hard reboot of VM {{(pid=62522) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 671.131518] env[62522]: DEBUG nova.compute.manager [None req-7937a354-61d5-4466-b8ba-ce0d7b79ece3 tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 671.132337] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31db74a-ab59-44e9-9d32-ba3a5fef079d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.233631] env[62522]: DEBUG nova.network.neutron [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Successfully created port: 907f7f2d-f620-423c-bd77-a4685802e879 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 671.332777] env[62522]: DEBUG nova.compute.manager [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 671.626066] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5267c620-c4a9-5f9a-7936-a5f52728ac64, 'name': SearchDatastore_Task, 'duration_secs': 0.01738} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.626793] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 671.627073] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 9a098809-cc26-4210-b09e-b7825c406294/9a098809-cc26-4210-b09e-b7825c406294.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 671.627332] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-189356ff-1981-4ad1-8351-e36e0fd347ab {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.633925] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Waiting for the task: (returnval){ [ 671.633925] env[62522]: value = "task-2415081" [ 671.633925] env[62522]: _type = "Task" [ 671.633925] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.642166] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Task: {'id': task-2415081, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.649377] env[62522]: DEBUG nova.compute.manager [req-fefa2666-3e65-47bf-acd5-fa61c8470a0e req-b6408407-d8fc-400c-8d75-e4b1d6ccbf62 service nova] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Received event network-changed-fd45a2e0-42d5-4bd8-89d5-73200646889d {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 671.649594] env[62522]: DEBUG nova.compute.manager [req-fefa2666-3e65-47bf-acd5-fa61c8470a0e req-b6408407-d8fc-400c-8d75-e4b1d6ccbf62 service nova] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Refreshing instance network info cache due to event network-changed-fd45a2e0-42d5-4bd8-89d5-73200646889d. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 671.649817] env[62522]: DEBUG oslo_concurrency.lockutils [req-fefa2666-3e65-47bf-acd5-fa61c8470a0e req-b6408407-d8fc-400c-8d75-e4b1d6ccbf62 service nova] Acquiring lock "refresh_cache-95e4fe36-6830-4fc4-bb53-1e5643c2f95b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 671.649965] env[62522]: DEBUG oslo_concurrency.lockutils [req-fefa2666-3e65-47bf-acd5-fa61c8470a0e req-b6408407-d8fc-400c-8d75-e4b1d6ccbf62 service nova] Acquired lock "refresh_cache-95e4fe36-6830-4fc4-bb53-1e5643c2f95b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.650151] env[62522]: DEBUG nova.network.neutron [req-fefa2666-3e65-47bf-acd5-fa61c8470a0e req-b6408407-d8fc-400c-8d75-e4b1d6ccbf62 service nova] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Refreshing network info cache for port fd45a2e0-42d5-4bd8-89d5-73200646889d {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 671.652443] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7937a354-61d5-4466-b8ba-ce0d7b79ece3 tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Lock "a804f755-58b2-4350-8726-4e82f60afcdc" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.590s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 671.903413] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64cad474-1c0c-4904-bff3-ad3121612e00 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.912447] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfaef643-adeb-4808-aba0-11123c035cd0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.947098] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb472324-5262-442d-9eb3-1d6463f1a6d0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.955415] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30616da1-26ab-4784-b5f1-16f5aec9c996 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.969682] env[62522]: DEBUG nova.compute.provider_tree [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 672.011379] env[62522]: DEBUG oslo_concurrency.lockutils [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "ebca687d-4de7-4fd6-99fb-b4f0154abe9c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 672.011379] env[62522]: DEBUG oslo_concurrency.lockutils [None 
req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "ebca687d-4de7-4fd6-99fb-b4f0154abe9c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 672.071925] env[62522]: DEBUG nova.network.neutron [-] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.147075] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Task: {'id': task-2415081, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.343955] env[62522]: DEBUG nova.compute.manager [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 672.369242] env[62522]: DEBUG nova.virt.hardware [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 672.369482] env[62522]: DEBUG nova.virt.hardware [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 672.369642] env[62522]: DEBUG nova.virt.hardware [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 672.369822] env[62522]: DEBUG nova.virt.hardware [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 672.369968] env[62522]: DEBUG nova.virt.hardware [None 
req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 672.370223] env[62522]: DEBUG nova.virt.hardware [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 672.370452] env[62522]: DEBUG nova.virt.hardware [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 672.370613] env[62522]: DEBUG nova.virt.hardware [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 672.370781] env[62522]: DEBUG nova.virt.hardware [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 672.370944] env[62522]: DEBUG nova.virt.hardware [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 672.371139] env[62522]: DEBUG nova.virt.hardware [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 672.372072] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9505e3d-d107-4cea-b0f8-29b0336ffbf2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.381801] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2233c25a-387f-4087-ae34-ac49bda92492 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.473870] env[62522]: DEBUG nova.scheduler.client.report [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 672.574702] env[62522]: INFO nova.compute.manager [-] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Took 1.48 seconds to deallocate network for instance. [ 672.649907] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Task: {'id': task-2415081, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.690078} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.650610] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 9a098809-cc26-4210-b09e-b7825c406294/9a098809-cc26-4210-b09e-b7825c406294.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 672.650610] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 672.650821] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-be1cfcef-7d1e-438a-a390-b74e838c01a4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.660489] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Waiting for the task: (returnval){ [ 672.660489] env[62522]: value = "task-2415082" [ 672.660489] env[62522]: _type = "Task" [ 672.660489] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.672383] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Task: {'id': task-2415082, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.797576] env[62522]: DEBUG nova.network.neutron [req-fefa2666-3e65-47bf-acd5-fa61c8470a0e req-b6408407-d8fc-400c-8d75-e4b1d6ccbf62 service nova] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Updated VIF entry in instance network info cache for port fd45a2e0-42d5-4bd8-89d5-73200646889d. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 672.798024] env[62522]: DEBUG nova.network.neutron [req-fefa2666-3e65-47bf-acd5-fa61c8470a0e req-b6408407-d8fc-400c-8d75-e4b1d6ccbf62 service nova] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Updating instance_info_cache with network_info: [{"id": "fd45a2e0-42d5-4bd8-89d5-73200646889d", "address": "fa:16:3e:02:d3:18", "network": {"id": "4cd63604-2f08-44bf-9d47-b408383e6296", "bridge": "br-int", "label": "tempest-ServersTestJSON-916427232-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c4c1e4d92254cbd89569f7c135646a5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd45a2e0-42", "ovs_interfaceid": "fd45a2e0-42d5-4bd8-89d5-73200646889d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.893318] env[62522]: DEBUG nova.network.neutron [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Successfully updated port: 907f7f2d-f620-423c-bd77-a4685802e879 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 672.979504] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.665s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.980088] env[62522]: DEBUG nova.compute.manager [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Start building networks asynchronously for instance. 
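The instance_info_cache payloads above (for ports fd0b859a-19... and fd45a2e0-...) share one shape: a list of VIFs, each with an `address`, a `network` holding `subnets`, and per-subnet `ips` that may carry `floating_ips`. A minimal sketch, assuming a plain dict/list structure exactly as printed in the log, that pulls the fixed and floating addresses out of one cache entry:

```python
def addresses_from_network_info(network_info):
    """Collect (mac, fixed_ip, floating_ips) tuples from a cached
    network_info list shaped like the entries logged above."""
    results = []
    for vif in network_info:
        mac = vif.get('address')
        for subnet in vif.get('network', {}).get('subnets', []):
            for ip in subnet.get('ips', []):
                floating = [f['address'] for f in ip.get('floating_ips', [])]
                results.append((mac, ip.get('address'), floating))
    return results

# With the fd45a2e0-42d5-... entry above this yields
# [('fa:16:3e:02:d3:18', '192.168.128.9', ['10.180.180.172'])].
```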
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 672.982766] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.701s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 672.984243] env[62522]: INFO nova.compute.claims [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 673.082634] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.162061] env[62522]: DEBUG nova.compute.manager [req-1a24d738-43b7-4aab-ba95-58773a43eefc req-f5840900-f270-4eb4-8f4f-ba868d011f1f service nova] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Received event network-vif-plugged-907f7f2d-f620-423c-bd77-a4685802e879 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 673.162360] env[62522]: DEBUG oslo_concurrency.lockutils [req-1a24d738-43b7-4aab-ba95-58773a43eefc req-f5840900-f270-4eb4-8f4f-ba868d011f1f service nova] Acquiring lock "4de70165-c28f-44b7-a01a-caa0787170b8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.162483] env[62522]: DEBUG oslo_concurrency.lockutils [req-1a24d738-43b7-4aab-ba95-58773a43eefc req-f5840900-f270-4eb4-8f4f-ba868d011f1f service nova] Lock "4de70165-c28f-44b7-a01a-caa0787170b8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.162701] env[62522]: DEBUG oslo_concurrency.lockutils [req-1a24d738-43b7-4aab-ba95-58773a43eefc req-f5840900-f270-4eb4-8f4f-ba868d011f1f service nova] Lock "4de70165-c28f-44b7-a01a-caa0787170b8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 673.162910] env[62522]: DEBUG nova.compute.manager [req-1a24d738-43b7-4aab-ba95-58773a43eefc req-f5840900-f270-4eb4-8f4f-ba868d011f1f service nova] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] No waiting events found dispatching network-vif-plugged-907f7f2d-f620-423c-bd77-a4685802e879 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 673.163462] env[62522]: WARNING nova.compute.manager [req-1a24d738-43b7-4aab-ba95-58773a43eefc req-f5840900-f270-4eb4-8f4f-ba868d011f1f service nova] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Received unexpected event network-vif-plugged-907f7f2d-f620-423c-bd77-a4685802e879 for instance with vm_state 
building and task_state spawning. [ 673.174211] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Task: {'id': task-2415082, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065405} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.174472] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 673.175370] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc5539a3-1616-49f0-b0ee-dc93c0a5b87e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.196390] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] 9a098809-cc26-4210-b09e-b7825c406294/9a098809-cc26-4210-b09e-b7825c406294.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 673.196703] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed526254-eafc-44dd-9aa6-f343fde74f43 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.221206] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Waiting for the task: (returnval){ [ 673.221206] env[62522]: value = "task-2415083" [ 673.221206] env[62522]: _type = "Task" [ 673.221206] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.230562] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Task: {'id': task-2415083, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.304067] env[62522]: DEBUG oslo_concurrency.lockutils [req-fefa2666-3e65-47bf-acd5-fa61c8470a0e req-b6408407-d8fc-400c-8d75-e4b1d6ccbf62 service nova] Releasing lock "refresh_cache-95e4fe36-6830-4fc4-bb53-1e5643c2f95b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.402123] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Acquiring lock "refresh_cache-4de70165-c28f-44b7-a01a-caa0787170b8" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 673.402123] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Acquired lock "refresh_cache-4de70165-c28f-44b7-a01a-caa0787170b8" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.405377] env[62522]: DEBUG nova.network.neutron [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 673.488921] env[62522]: DEBUG nova.compute.utils [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 673.492272] env[62522]: DEBUG nova.compute.manager [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Not allocating networking since 'none' was specified. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 673.735123] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Task: {'id': task-2415083, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.901016] env[62522]: DEBUG nova.compute.manager [req-df13afa0-9d6a-4871-89e6-7bcca51150a9 req-e72bf6e4-3095-4916-8440-505162245573 service nova] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Received event network-vif-deleted-d312748e-14f2-4467-bf2a-2f6479f774f0 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 673.901788] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Acquiring lock "a804f755-58b2-4350-8726-4e82f60afcdc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.906018] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Lock "a804f755-58b2-4350-8726-4e82f60afcdc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.906018] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Acquiring lock "a804f755-58b2-4350-8726-4e82f60afcdc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.906018] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Lock "a804f755-58b2-4350-8726-4e82f60afcdc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.906018] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Lock "a804f755-58b2-4350-8726-4e82f60afcdc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 673.907527] env[62522]: INFO nova.compute.manager [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Terminating instance [ 673.947771] env[62522]: DEBUG nova.network.neutron [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 673.993932] env[62522]: DEBUG nova.compute.manager [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 674.148268] env[62522]: DEBUG nova.network.neutron [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Updating instance_info_cache with network_info: [{"id": "907f7f2d-f620-423c-bd77-a4685802e879", "address": "fa:16:3e:b4:ac:d1", "network": {"id": "098fc34b-2576-46ea-b611-12603e03cd93", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1626728290-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b97af192e04ccfa14c4ee30eb6d056", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d5d0e0d-cdec-474a-a891-a9ceff15a8b2", "external-id": "nsx-vlan-transportzone-456", "segmentation_id": 456, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap907f7f2d-f6", "ovs_interfaceid": "907f7f2d-f620-423c-bd77-a4685802e879", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.236052] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Task: {'id': task-2415083, 'name': ReconfigVM_Task, 'duration_secs': 0.87411} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.238663] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Reconfigured VM instance instance-0000000a to attach disk [datastore2] 9a098809-cc26-4210-b09e-b7825c406294/9a098809-cc26-4210-b09e-b7825c406294.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 674.241732] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c069f4c4-d19f-4276-99ec-c72c4cf5b687 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.249178] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Waiting for the task: (returnval){ [ 674.249178] env[62522]: value = "task-2415084" [ 674.249178] env[62522]: _type = "Task" [ 674.249178] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.257209] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Task: {'id': task-2415084, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.413685] env[62522]: DEBUG nova.compute.manager [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 674.413888] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 674.415195] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf657bb-b1ce-4006-b616-e06385523d37 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.424030] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 674.424030] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-247e095a-7a53-45e3-8f08-d4b012017934 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.430751] env[62522]: DEBUG oslo_vmware.api [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Waiting for the task: (returnval){ [ 674.430751] env[62522]: value = "task-2415085" [ 674.430751] env[62522]: _type = "Task" [ 674.430751] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.439884] env[62522]: DEBUG oslo_vmware.api [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Task: {'id': task-2415085, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.544653] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b154e22-bba5-443c-a08b-99e2279c875f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.552269] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7816d73-55b8-4b6e-964a-58d30e10d265 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.586620] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd3dbe4-5e2b-4443-9b5e-144bb5953436 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.594403] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec9b2064-62b6-4ace-a713-d6bd93b99bbd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.608609] env[62522]: DEBUG nova.compute.provider_tree [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 674.652657] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Releasing lock "refresh_cache-4de70165-c28f-44b7-a01a-caa0787170b8" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 674.653018] env[62522]: DEBUG nova.compute.manager [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Instance network_info: |[{"id": "907f7f2d-f620-423c-bd77-a4685802e879", "address": "fa:16:3e:b4:ac:d1", "network": {"id": "098fc34b-2576-46ea-b611-12603e03cd93", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1626728290-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b97af192e04ccfa14c4ee30eb6d056", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d5d0e0d-cdec-474a-a891-a9ceff15a8b2", "external-id": "nsx-vlan-transportzone-456", "segmentation_id": 456, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap907f7f2d-f6", "ovs_interfaceid": "907f7f2d-f620-423c-bd77-a4685802e879", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 674.653462] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:ac:d1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7d5d0e0d-cdec-474a-a891-a9ceff15a8b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '907f7f2d-f620-423c-bd77-a4685802e879', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 674.662179] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Creating folder: Project (f3b97af192e04ccfa14c4ee30eb6d056). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 674.662179] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d5bb68ca-ef45-46e9-bc43-b16a6ed28c3d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.671756] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Created folder: Project (f3b97af192e04ccfa14c4ee30eb6d056) in parent group-v489562. [ 674.671959] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Creating folder: Instances. Parent ref: group-v489606. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 674.672209] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5c4e2f55-0c4d-4405-a7a8-e0c8ece779fd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.681093] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Created folder: Instances in parent group-v489606. [ 674.681348] env[62522]: DEBUG oslo.service.loopingcall [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 674.681614] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 674.681814] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6d75c944-c13d-41c2-ad92-ffbe70d3a61c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.702591] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 674.702591] env[62522]: value = "task-2415088" [ 674.702591] env[62522]: _type = "Task" [ 674.702591] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.714219] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415088, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.758378] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Task: {'id': task-2415084, 'name': Rename_Task, 'duration_secs': 0.254589} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.758771] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 674.758912] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-91d51173-a799-4804-b4c6-de9e73633429 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.765882] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Waiting for the task: (returnval){ [ 674.765882] env[62522]: value = "task-2415089" [ 674.765882] env[62522]: _type = "Task" [ 674.765882] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.774319] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Task: {'id': task-2415089, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.943310] env[62522]: DEBUG oslo_vmware.api [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Task: {'id': task-2415085, 'name': PowerOffVM_Task, 'duration_secs': 0.215911} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.943883] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 674.944181] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 674.944547] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e28686d1-7a23-411d-9330-8af090c219d5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.008470] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 675.008932] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 675.008932] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Deleting the datastore file [datastore2] a804f755-58b2-4350-8726-4e82f60afcdc {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 675.009898] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-768dd4df-78b7-417c-b867-dfddf7ad0698 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.013806] env[62522]: DEBUG nova.compute.manager [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 675.023355] env[62522]: DEBUG oslo_vmware.api [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Waiting for the task: (returnval){ [ 675.023355] env[62522]: value = "task-2415091" [ 675.023355] env[62522]: _type = "Task" [ 675.023355] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.043091] env[62522]: DEBUG oslo_vmware.api [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Task: {'id': task-2415091, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.059112] env[62522]: DEBUG nova.virt.hardware [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 675.059112] env[62522]: DEBUG nova.virt.hardware [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 675.059112] env[62522]: DEBUG nova.virt.hardware [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 675.059112] env[62522]: DEBUG nova.virt.hardware [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 675.059440] env[62522]: DEBUG nova.virt.hardware [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 675.059440] env[62522]: DEBUG nova.virt.hardware [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 675.059440] env[62522]: DEBUG nova.virt.hardware [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 675.059440] env[62522]: DEBUG nova.virt.hardware [None 
req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 675.059440] env[62522]: DEBUG nova.virt.hardware [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 675.059609] env[62522]: DEBUG nova.virt.hardware [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 675.059609] env[62522]: DEBUG nova.virt.hardware [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 675.059609] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-343eb32d-921e-450f-99ae-65422e4a6b9c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.066968] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa78b009-753c-49b3-acb4-d3db9c24d706 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.081700] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Instance VIF info [] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 675.088211] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Creating folder: Project (f3f79491be1a44ecbd0fc6f92585f08b). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 675.088850] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0a2ea1bb-0023-454f-a0cb-ab0f33cbd23b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.098671] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Created folder: Project (f3f79491be1a44ecbd0fc6f92585f08b) in parent group-v489562. [ 675.099241] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Creating folder: Instances. Parent ref: group-v489609. 
{{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 675.099241] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5878d81b-ebcf-44f5-9094-c4273122dbc6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.112116] env[62522]: DEBUG nova.scheduler.client.report [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 675.115861] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Created folder: Instances in parent group-v489609. [ 675.116221] env[62522]: DEBUG oslo.service.loopingcall [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 675.118889] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 675.118889] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-de68e010-2ad8-4ace-886e-a0ebc1faea13 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.134826] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 675.134826] env[62522]: value = "task-2415094" [ 675.134826] env[62522]: _type = "Task" [ 675.134826] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.145029] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415094, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.213129] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415088, 'name': CreateVM_Task, 'duration_secs': 0.366393} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.213262] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 675.213993] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 675.214196] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.214498] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 675.214702] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e66a306-d3c4-4e01-9f7d-f0158d6b7fa6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.220139] env[62522]: DEBUG oslo_vmware.api [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Waiting for the task: (returnval){ [ 675.220139] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52743231-8a1e-bfc3-b6d9-d8c9bea883a6" [ 675.220139] env[62522]: _type = "Task" [ 675.220139] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.229122] env[62522]: DEBUG oslo_vmware.api [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52743231-8a1e-bfc3-b6d9-d8c9bea883a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.276567] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Task: {'id': task-2415089, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.281400] env[62522]: DEBUG nova.compute.manager [req-ac17d480-9d1a-427d-ae69-1241401a1029 req-d21a9685-80f0-4cc1-b776-1d78983e08f3 service nova] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Received event network-changed-907f7f2d-f620-423c-bd77-a4685802e879 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 675.281400] env[62522]: DEBUG nova.compute.manager [req-ac17d480-9d1a-427d-ae69-1241401a1029 req-d21a9685-80f0-4cc1-b776-1d78983e08f3 service nova] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Refreshing instance network info cache due to event network-changed-907f7f2d-f620-423c-bd77-a4685802e879. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 675.281400] env[62522]: DEBUG oslo_concurrency.lockutils [req-ac17d480-9d1a-427d-ae69-1241401a1029 req-d21a9685-80f0-4cc1-b776-1d78983e08f3 service nova] Acquiring lock "refresh_cache-4de70165-c28f-44b7-a01a-caa0787170b8" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 675.281400] env[62522]: DEBUG oslo_concurrency.lockutils [req-ac17d480-9d1a-427d-ae69-1241401a1029 req-d21a9685-80f0-4cc1-b776-1d78983e08f3 service nova] Acquired lock "refresh_cache-4de70165-c28f-44b7-a01a-caa0787170b8" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.281400] env[62522]: DEBUG nova.network.neutron [req-ac17d480-9d1a-427d-ae69-1241401a1029 req-d21a9685-80f0-4cc1-b776-1d78983e08f3 service nova] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Refreshing network info cache for port 907f7f2d-f620-423c-bd77-a4685802e879 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 675.539539] env[62522]: DEBUG oslo_vmware.api [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Task: {'id': task-2415091, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156012} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.540061] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 675.541392] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 675.541392] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 675.541392] env[62522]: INFO nova.compute.manager [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Took 1.13 seconds to destroy the instance on the hypervisor. [ 675.542431] env[62522]: DEBUG oslo.service.loopingcall [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 675.542597] env[62522]: DEBUG nova.compute.manager [-] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 675.542737] env[62522]: DEBUG nova.network.neutron [-] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 675.617863] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.634s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 675.618101] env[62522]: DEBUG nova.compute.manager [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 675.622401] env[62522]: DEBUG oslo_concurrency.lockutils [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.961s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.622401] env[62522]: DEBUG oslo_concurrency.lockutils [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 675.625044] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.417s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.626913] env[62522]: INFO nova.compute.claims [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 675.646613] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415094, 'name': CreateVM_Task, 'duration_secs': 0.289465} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.647496] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 675.647496] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 675.674996] env[62522]: INFO nova.scheduler.client.report [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Deleted allocations for instance 7828f9c8-fc02-4218-ba93-5362af807dad [ 675.735815] env[62522]: DEBUG oslo_vmware.api [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52743231-8a1e-bfc3-b6d9-d8c9bea883a6, 'name': SearchDatastore_Task, 'duration_secs': 0.010339} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.736147] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 675.736492] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 675.736744] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 675.736980] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.737074] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 675.737591] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.737664] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 675.737914] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-108084a2-9173-4bc3-9c85-0071334d5d21 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.741190] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3cdd8c1d-0fea-4017-92e7-56cb19407640 {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.747486] env[62522]: DEBUG oslo_vmware.api [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Waiting for the task: (returnval){ [ 675.747486] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ba662b-0fce-7c67-fbba-d0e60d65e333" [ 675.747486] env[62522]: _type = "Task" [ 675.747486] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.751812] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 675.751997] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 675.752998] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77364ca7-a51a-45c2-a827-e5d5fc2b2bc2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.759341] env[62522]: DEBUG oslo_vmware.api [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ba662b-0fce-7c67-fbba-d0e60d65e333, 'name': SearchDatastore_Task, 'duration_secs': 0.008535} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.759964] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 675.760212] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 675.760430] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 675.766026] env[62522]: DEBUG oslo_vmware.api [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Waiting for the task: (returnval){ [ 675.766026] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d6700f-795a-344e-19fe-894c48af951e" [ 675.766026] env[62522]: _type = "Task" [ 675.766026] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.769791] env[62522]: DEBUG oslo_vmware.api [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d6700f-795a-344e-19fe-894c48af951e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.779170] env[62522]: DEBUG oslo_vmware.api [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Task: {'id': task-2415089, 'name': PowerOnVM_Task, 'duration_secs': 0.548315} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.779435] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 675.779631] env[62522]: DEBUG nova.compute.manager [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 675.780386] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb3ea7b-c506-4870-bab1-98051f434ab3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.033973] env[62522]: DEBUG nova.network.neutron [req-ac17d480-9d1a-427d-ae69-1241401a1029 req-d21a9685-80f0-4cc1-b776-1d78983e08f3 service nova] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Updated VIF entry in instance network info cache for port 907f7f2d-f620-423c-bd77-a4685802e879. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 676.034333] env[62522]: DEBUG nova.network.neutron [req-ac17d480-9d1a-427d-ae69-1241401a1029 req-d21a9685-80f0-4cc1-b776-1d78983e08f3 service nova] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Updating instance_info_cache with network_info: [{"id": "907f7f2d-f620-423c-bd77-a4685802e879", "address": "fa:16:3e:b4:ac:d1", "network": {"id": "098fc34b-2576-46ea-b611-12603e03cd93", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1626728290-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b97af192e04ccfa14c4ee30eb6d056", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d5d0e0d-cdec-474a-a891-a9ceff15a8b2", "external-id": "nsx-vlan-transportzone-456", "segmentation_id": 456, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap907f7f2d-f6", "ovs_interfaceid": "907f7f2d-f620-423c-bd77-a4685802e879", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.136706] env[62522]: DEBUG nova.compute.utils [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 676.138247] env[62522]: DEBUG nova.compute.manager [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 
7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 676.138621] env[62522]: DEBUG nova.network.neutron [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 676.184477] env[62522]: DEBUG oslo_concurrency.lockutils [None req-02931f0c-be4c-4034-998e-867499e2d977 tempest-InstanceActionsV221TestJSON-1072674063 tempest-InstanceActionsV221TestJSON-1072674063-project-member] Lock "7828f9c8-fc02-4218-ba93-5362af807dad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.713s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.213876] env[62522]: DEBUG nova.policy [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cf02455354954275b86bee37d357f071', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0dae444f2b5845aa9264fea1f237f0e3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 676.273802] env[62522]: DEBUG oslo_vmware.api [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d6700f-795a-344e-19fe-894c48af951e, 'name': SearchDatastore_Task, 'duration_secs': 0.007802} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.274668] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-761f6cdf-5dce-4f26-a0ad-831daec2fdfc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.280341] env[62522]: DEBUG oslo_vmware.api [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Waiting for the task: (returnval){ [ 676.280341] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52114398-cbb1-153c-b30a-8e1a48049581" [ 676.280341] env[62522]: _type = "Task" [ 676.280341] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.298279] env[62522]: DEBUG oslo_vmware.api [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52114398-cbb1-153c-b30a-8e1a48049581, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.300768] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 676.513089] env[62522]: DEBUG nova.network.neutron [-] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.537479] env[62522]: DEBUG oslo_concurrency.lockutils [req-ac17d480-9d1a-427d-ae69-1241401a1029 req-d21a9685-80f0-4cc1-b776-1d78983e08f3 service nova] Releasing lock "refresh_cache-4de70165-c28f-44b7-a01a-caa0787170b8" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 676.643291] env[62522]: DEBUG nova.compute.manager [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 676.775068] env[62522]: DEBUG nova.network.neutron [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Successfully created port: d2781fca-06c0-403d-8704-705de755c0a0 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 676.795777] env[62522]: DEBUG oslo_vmware.api [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52114398-cbb1-153c-b30a-8e1a48049581, 'name': SearchDatastore_Task, 'duration_secs': 0.011076} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.795983] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 676.796353] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 4de70165-c28f-44b7-a01a-caa0787170b8/4de70165-c28f-44b7-a01a-caa0787170b8.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 676.797093] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.797093] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 676.797423] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-14afac6a-9bfd-43d9-8124-b8ff3e3cc833 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.803094] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5bf2436c-54aa-49bc-a3dc-9179da65b491 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.810663] env[62522]: DEBUG oslo_vmware.api [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Waiting for the task: (returnval){ [ 676.810663] env[62522]: value = "task-2415095" [ 676.810663] env[62522]: _type = "Task" [ 676.810663] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.814987] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 676.815202] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 676.818778] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06d193af-f94b-40a2-880e-38165afe1f1c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.826042] env[62522]: DEBUG oslo_vmware.api [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Task: {'id': task-2415095, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.828328] env[62522]: DEBUG oslo_vmware.api [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Waiting for the task: (returnval){ [ 676.828328] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5220eaa4-6591-a99c-8b17-099a4e5ab810" [ 676.828328] env[62522]: _type = "Task" [ 676.828328] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.842155] env[62522]: DEBUG oslo_vmware.api [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5220eaa4-6591-a99c-8b17-099a4e5ab810, 'name': SearchDatastore_Task, 'duration_secs': 0.009162} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.842155] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a4ccc8d-070a-40b0-9ea6-754934e1f323 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.852024] env[62522]: DEBUG oslo_vmware.api [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Waiting for the task: (returnval){ [ 676.852024] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52759f65-dc20-53f9-c5ff-74d093f5092d" [ 676.852024] env[62522]: _type = "Task" [ 676.852024] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.863029] env[62522]: DEBUG oslo_vmware.api [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52759f65-dc20-53f9-c5ff-74d093f5092d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.016591] env[62522]: INFO nova.compute.manager [-] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Took 1.47 seconds to deallocate network for instance. 
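Most of the vCenter traffic in this trace follows oslo.vmware's request/poll pattern: an "Invoking <ManagedObject>.<SomeMethod>_Task" line when the SOAP request is sent, then "Waiting for the task: (returnval){...}", a series of "Task: {...} progress is N%" polls, and finally "... completed successfully" with the duration. A minimal sketch of that pattern as a client would use it (the endpoint, credentials and managed-object reference below are placeholders, not values from this log, and it needs a reachable vCenter to actually run):

from oslo_vmware import api, vim_util

# Placeholder vCenter endpoint and credentials -- purely illustrative.
session = api.VMwareAPISession('vc.example.test', 'admin', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

# invoke_api() issues the SOAP call (the "Invoking <Object>.<Method>_Task" lines);
# wait_for_task() then polls the returned Task managed object, which produces the
# "Task: {...} progress is N%" and "... completed successfully" lines seen above.
vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')   # hypothetical moref
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)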
[ 677.163263] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c23c100-609c-455f-8c84-985af152a17a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.171367] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4daadf3-159a-496d-93e7-a824e219e447 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.210576] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c13b6f2b-8ddf-4e21-85e9-69bd807c6380 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.221157] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da44763-c79c-4283-8131-3a0f21fd8a9c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.240134] env[62522]: DEBUG nova.compute.provider_tree [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 677.323198] env[62522]: DEBUG oslo_vmware.api [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Task: {'id': task-2415095, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482249} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.323870] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 4de70165-c28f-44b7-a01a-caa0787170b8/4de70165-c28f-44b7-a01a-caa0787170b8.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 677.324041] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 677.324311] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2f2ac195-1b3f-41b0-9178-24c5be557f9c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.335099] env[62522]: DEBUG oslo_vmware.api [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Waiting for the task: (returnval){ [ 677.335099] env[62522]: value = "task-2415096" [ 677.335099] env[62522]: _type = "Task" [ 677.335099] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.344823] env[62522]: DEBUG oslo_vmware.api [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Task: {'id': task-2415096, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.365681] env[62522]: DEBUG oslo_vmware.api [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52759f65-dc20-53f9-c5ff-74d093f5092d, 'name': SearchDatastore_Task, 'duration_secs': 0.008804} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.366068] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 677.366801] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] c73686c6-4dd8-4f00-a65a-5d8574409ad1/c73686c6-4dd8-4f00-a65a-5d8574409ad1.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 677.367148] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-15e1d216-1120-4b36-a460-11990c3e0368 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.374627] env[62522]: DEBUG oslo_vmware.api [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Waiting for the task: (returnval){ [ 677.374627] env[62522]: value = "task-2415097" [ 677.374627] env[62522]: _type = "Task" [ 677.374627] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.387172] env[62522]: DEBUG oslo_vmware.api [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415097, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.490336] env[62522]: DEBUG nova.compute.manager [req-e4625a41-ae1f-4061-aa94-8efccad6cf12 req-8202e936-0cc6-4359-a443-a4348bad0555 service nova] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Received event network-vif-deleted-e05da487-a40f-44d2-a390-d0795275ff10 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 677.526029] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.655628] env[62522]: DEBUG nova.compute.manager [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 677.690728] env[62522]: DEBUG nova.virt.hardware [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:18:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1943045594',id=22,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1478446300',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 677.690728] env[62522]: DEBUG nova.virt.hardware [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 677.691101] env[62522]: DEBUG nova.virt.hardware [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 677.691337] env[62522]: DEBUG nova.virt.hardware [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 677.692375] env[62522]: DEBUG nova.virt.hardware [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 
tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 677.692375] env[62522]: DEBUG nova.virt.hardware [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 677.692375] env[62522]: DEBUG nova.virt.hardware [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 677.692375] env[62522]: DEBUG nova.virt.hardware [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 677.692602] env[62522]: DEBUG nova.virt.hardware [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 677.693334] env[62522]: DEBUG nova.virt.hardware [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 677.693334] env[62522]: DEBUG nova.virt.hardware [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 677.696682] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d135d099-13f4-4539-9dce-60f1d8ab174d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.704431] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d82f000-914d-4c71-b824-2e0de0c2519c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.745933] env[62522]: DEBUG nova.scheduler.client.report [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 677.848993] env[62522]: DEBUG oslo_vmware.api [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Task: {'id': task-2415096, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.143062} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.849856] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 677.850816] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2943842f-d6ac-4298-8955-bb3549a52562 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.881340] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] 4de70165-c28f-44b7-a01a-caa0787170b8/4de70165-c28f-44b7-a01a-caa0787170b8.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 677.881431] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-14c6c4ad-aef6-478e-b289-c2a9af4d3434 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.911022] env[62522]: DEBUG oslo_vmware.api [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415097, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.44063} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.913147] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] c73686c6-4dd8-4f00-a65a-5d8574409ad1/c73686c6-4dd8-4f00-a65a-5d8574409ad1.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 677.914260] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 677.914640] env[62522]: DEBUG oslo_vmware.api [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Waiting for the task: (returnval){ [ 677.914640] env[62522]: value = "task-2415098" [ 677.914640] env[62522]: _type = "Task" [ 677.914640] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.914854] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-39806790-6d8d-4372-9867-705f64fcea9a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.927111] env[62522]: DEBUG oslo_vmware.api [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Waiting for the task: (returnval){ [ 677.927111] env[62522]: value = "task-2415099" [ 677.927111] env[62522]: _type = "Task" [ 677.927111] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.927527] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Acquiring lock "9a098809-cc26-4210-b09e-b7825c406294" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.927753] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Lock "9a098809-cc26-4210-b09e-b7825c406294" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 677.927955] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Acquiring lock "9a098809-cc26-4210-b09e-b7825c406294-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.928204] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Lock "9a098809-cc26-4210-b09e-b7825c406294-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 677.928476] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Lock "9a098809-cc26-4210-b09e-b7825c406294-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 677.934060] env[62522]: INFO nova.compute.manager [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Terminating instance [ 677.943048] env[62522]: DEBUG oslo_vmware.api [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415099, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.250520] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.625s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.252847] env[62522]: DEBUG nova.compute.manager [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 678.256259] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.022s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.258082] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.002s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.259888] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.323s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.260395] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.262014] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.641s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.263405] env[62522]: INFO nova.compute.claims [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 678.306992] env[62522]: INFO nova.scheduler.client.report [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 
tempest-ServerDiagnosticsTest-1704629071-project-member] Deleted allocations for instance 758ed671-347a-4949-9842-2f8cdcd261ae [ 678.309750] env[62522]: INFO nova.scheduler.client.report [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Deleted allocations for instance c8779822-1694-463e-bd06-5f84d867d1bd [ 678.419978] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Acquiring lock "63a7f41d-13cc-420a-96d3-a3f102869137" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 678.419978] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Lock "63a7f41d-13cc-420a-96d3-a3f102869137" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.429520] env[62522]: DEBUG oslo_vmware.api [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Task: {'id': task-2415098, 'name': ReconfigVM_Task, 'duration_secs': 0.288513} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.432702] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Reconfigured VM instance instance-0000000f to attach disk [datastore1] 4de70165-c28f-44b7-a01a-caa0787170b8/4de70165-c28f-44b7-a01a-caa0787170b8.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 678.433344] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0ed46597-2c80-475f-95f8-179ec4340f1c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.439549] env[62522]: DEBUG oslo_vmware.api [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415099, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069925} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.440804] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 678.441220] env[62522]: DEBUG oslo_vmware.api [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Waiting for the task: (returnval){ [ 678.441220] env[62522]: value = "task-2415100" [ 678.441220] env[62522]: _type = "Task" [ 678.441220] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.441882] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb90c9ac-58b3-4d60-a435-85225ec3ca3e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.444849] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Acquiring lock "refresh_cache-9a098809-cc26-4210-b09e-b7825c406294" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 678.444999] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Acquired lock "refresh_cache-9a098809-cc26-4210-b09e-b7825c406294" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.445182] env[62522]: DEBUG nova.network.neutron [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 678.470209] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] c73686c6-4dd8-4f00-a65a-5d8574409ad1/c73686c6-4dd8-4f00-a65a-5d8574409ad1.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 678.475015] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c194cfc7-bf64-4b0d-9666-341887070f34 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.488909] env[62522]: DEBUG oslo_vmware.api [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Task: {'id': task-2415100, 'name': Rename_Task} progress is 10%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.494660] env[62522]: DEBUG oslo_vmware.api [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Waiting for the task: (returnval){ [ 678.494660] env[62522]: value = "task-2415101" [ 678.494660] env[62522]: _type = "Task" [ 678.494660] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.504270] env[62522]: DEBUG oslo_vmware.api [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415101, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.549013] env[62522]: DEBUG nova.compute.manager [req-554865b2-4c13-4d2f-a977-c0bedf0d06be req-211dc02b-866a-4c98-a744-9f20c7cdec85 service nova] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Received event network-vif-plugged-d2781fca-06c0-403d-8704-705de755c0a0 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 678.549261] env[62522]: DEBUG oslo_concurrency.lockutils [req-554865b2-4c13-4d2f-a977-c0bedf0d06be req-211dc02b-866a-4c98-a744-9f20c7cdec85 service nova] Acquiring lock "7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 678.549473] env[62522]: DEBUG oslo_concurrency.lockutils [req-554865b2-4c13-4d2f-a977-c0bedf0d06be req-211dc02b-866a-4c98-a744-9f20c7cdec85 service nova] Lock "7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.549743] env[62522]: DEBUG oslo_concurrency.lockutils [req-554865b2-4c13-4d2f-a977-c0bedf0d06be req-211dc02b-866a-4c98-a744-9f20c7cdec85 service nova] Lock "7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.549793] env[62522]: DEBUG nova.compute.manager [req-554865b2-4c13-4d2f-a977-c0bedf0d06be req-211dc02b-866a-4c98-a744-9f20c7cdec85 service nova] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] No waiting events found dispatching network-vif-plugged-d2781fca-06c0-403d-8704-705de755c0a0 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 678.549959] env[62522]: WARNING nova.compute.manager [req-554865b2-4c13-4d2f-a977-c0bedf0d06be req-211dc02b-866a-4c98-a744-9f20c7cdec85 service nova] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Received unexpected event network-vif-plugged-d2781fca-06c0-403d-8704-705de755c0a0 for instance with vm_state building and task_state spawning. 
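The repeated 'Acquiring lock "..." by "..."', 'Lock "..." acquired ... waited N.NNNs' and '"released" ... held N.NNNs' triples throughout this trace (compute_resources, the per-instance terminate and "-events" locks, the image-cache lock) come from oslo.concurrency's lockutils helpers. A minimal sketch of the two usage forms, with illustrative lock names and placeholder bodies rather than Nova's actual code:

from oslo_concurrency import lockutils

# Decorator form: serializes callers on a named lock; its wrapper is what logs
# the Acquiring / acquired-waited / released-held lines from lockutils.py.
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # placeholder critical section (resource-tracker style bookkeeping)

# Context-manager form, e.g. guarding a per-image cache entry such as
# "[datastore1] devstack-image-cache_base/<image>.vmdk" while it is fetched.
def fetch_image_if_missing(image_id):
    with lockutils.lock('[datastore1] devstack-image-cache_base/%s.vmdk' % image_id):
        pass  # copy the image into the cache only if it is not already there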
[ 678.673765] env[62522]: DEBUG nova.network.neutron [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Successfully updated port: d2781fca-06c0-403d-8704-705de755c0a0 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 678.768377] env[62522]: DEBUG nova.compute.utils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 678.771794] env[62522]: DEBUG nova.compute.manager [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 678.771964] env[62522]: DEBUG nova.network.neutron [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 678.823735] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d624558a-64db-4c66-ac73-7c3fcc5b3bb8 tempest-ServerDiagnosticsTest-1704629071 tempest-ServerDiagnosticsTest-1704629071-project-member] Lock "758ed671-347a-4949-9842-2f8cdcd261ae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.620s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.826326] env[62522]: DEBUG nova.policy [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e3ce17ca2f0d457c8768549d66b1400a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e47f8c538134439d8405e2825ad0af22', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 678.833825] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70878bd0-a303-4c25-87ca-d1ac213c83cb tempest-ServerDiagnosticsV248Test-1540031270 tempest-ServerDiagnosticsV248Test-1540031270-project-member] Lock "c8779822-1694-463e-bd06-5f84d867d1bd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.004s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.955907] env[62522]: DEBUG oslo_vmware.api [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Task: {'id': task-2415100, 'name': Rename_Task, 'duration_secs': 0.141448} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.956294] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 678.956728] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c7e9bb20-b20b-47a8-8a28-1ec22e2a7f98 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.962885] env[62522]: DEBUG oslo_vmware.api [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Waiting for the task: (returnval){ [ 678.962885] env[62522]: value = "task-2415102" [ 678.962885] env[62522]: _type = "Task" [ 678.962885] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.970421] env[62522]: DEBUG oslo_vmware.api [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Task: {'id': task-2415102, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.974838] env[62522]: DEBUG nova.network.neutron [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 679.009461] env[62522]: DEBUG oslo_vmware.api [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415101, 'name': ReconfigVM_Task, 'duration_secs': 0.266568} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.009760] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Reconfigured VM instance instance-00000011 to attach disk [datastore1] c73686c6-4dd8-4f00-a65a-5d8574409ad1/c73686c6-4dd8-4f00-a65a-5d8574409ad1.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 679.010386] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-93474a42-20d6-4cb0-9e57-ea431c4d8e1e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.016409] env[62522]: DEBUG oslo_vmware.api [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Waiting for the task: (returnval){ [ 679.016409] env[62522]: value = "task-2415103" [ 679.016409] env[62522]: _type = "Task" [ 679.016409] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.024640] env[62522]: DEBUG oslo_vmware.api [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415103, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.058970] env[62522]: DEBUG nova.network.neutron [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.179209] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Acquiring lock "refresh_cache-7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 679.179727] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Acquired lock "refresh_cache-7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.179727] env[62522]: DEBUG nova.network.neutron [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 679.275456] env[62522]: DEBUG nova.compute.manager [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 679.476351] env[62522]: DEBUG oslo_vmware.api [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Task: {'id': task-2415102, 'name': PowerOnVM_Task, 'duration_secs': 0.464497} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.476617] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 679.476815] env[62522]: INFO nova.compute.manager [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Took 7.13 seconds to spawn the instance on the hypervisor. 
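For each instance spawned above, the tasks trace the same root-disk preparation sequence: copy the cached image VMDK from devstack-image-cache_base into the instance directory (CopyVirtualDisk_Task), extend the copy to the flavor's root-disk size (ExtendVirtualDisk_Task), attach it to the VM (ReconfigVM_Task), rename the VM (Rename_Task) and power it on (PowerOnVM_Task). A rough sketch of the first two steps against the VirtualDiskManager, with illustrative argument names rather than Nova's own helpers:

def prepare_root_disk(session, dc_ref, cached_vmdk, instance_vmdk, size_kb):
    """Copy a cached image VMDK and grow it to the flavor's root-disk size.

    'session' is an oslo_vmware.api.VMwareAPISession as in the earlier sketch;
    dc_ref/cached_vmdk/instance_vmdk/size_kb are illustrative parameters.
    """
    disk_mgr = session.vim.service_content.virtualDiskManager

    copy_task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName=cached_vmdk, sourceDatacenter=dc_ref,
        destName=instance_vmdk)
    session.wait_for_task(copy_task)      # "CopyVirtualDisk_Task ... completed"

    extend_task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
        name=instance_vmdk, datacenter=dc_ref,
        newCapacityKb=size_kb, eagerZero=False)
    session.wait_for_task(extend_task)    # e.g. 1048576 KB for a 1 GB root disk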
[ 679.477033] env[62522]: DEBUG nova.compute.manager [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 679.477995] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49145f7d-90fa-45e1-b3ad-ee77be95ecb0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.513461] env[62522]: DEBUG nova.network.neutron [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Successfully created port: 7d31f9c2-3052-4e8a-b932-8aa226e03b49 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 679.527049] env[62522]: DEBUG oslo_vmware.api [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415103, 'name': Rename_Task, 'duration_secs': 0.14831} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.528191] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 679.528191] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d1485f00-f856-4a93-bfcd-5d5eacdd0a77 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.534019] env[62522]: DEBUG oslo_vmware.api [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Waiting for the task: (returnval){ [ 679.534019] env[62522]: value = "task-2415104" [ 679.534019] env[62522]: _type = "Task" [ 679.534019] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.547204] env[62522]: DEBUG oslo_vmware.api [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415104, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.561719] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Releasing lock "refresh_cache-9a098809-cc26-4210-b09e-b7825c406294" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 679.562185] env[62522]: DEBUG nova.compute.manager [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 679.562384] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 679.563268] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23dd2511-006b-45ea-b731-96f5bd2cce7b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.571479] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 679.574071] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-646c6ef5-602a-4168-9f81-483f63b49835 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.580156] env[62522]: DEBUG oslo_vmware.api [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Waiting for the task: (returnval){ [ 679.580156] env[62522]: value = "task-2415105" [ 679.580156] env[62522]: _type = "Task" [ 679.580156] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.589440] env[62522]: DEBUG oslo_vmware.api [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415105, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.736620] env[62522]: DEBUG nova.network.neutron [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 679.790582] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64fc20cf-ba66-4315-8f6a-ce9aeba3f1d2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.800330] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084efd91-0bc0-48f3-95af-722b6f226d55 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.833909] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f07fcf2-3e9b-4c38-a4a2-108fe9acc487 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.842550] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5593ffde-b822-4c69-a671-8e516bbd280e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.862140] env[62522]: DEBUG nova.compute.provider_tree [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 679.969319] env[62522]: DEBUG nova.network.neutron [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Updating instance_info_cache with network_info: [{"id": "d2781fca-06c0-403d-8704-705de755c0a0", "address": "fa:16:3e:74:71:d5", "network": {"id": "2d13676e-cb66-4166-b85f-b3c122b27f67", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1154652116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dae444f2b5845aa9264fea1f237f0e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2781fca-06", "ovs_interfaceid": "d2781fca-06c0-403d-8704-705de755c0a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.999831] env[62522]: INFO nova.compute.manager [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Took 34.70 seconds to build instance. 
[ 680.047470] env[62522]: DEBUG oslo_vmware.api [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415104, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.096238] env[62522]: DEBUG oslo_vmware.api [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415105, 'name': PowerOffVM_Task, 'duration_secs': 0.128196} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.096943] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 680.096943] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 680.096943] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f89080e8-fb6b-4e26-9d3d-581d35631445 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.125484] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 680.125700] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 680.126636] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Deleting the datastore file [datastore2] 9a098809-cc26-4210-b09e-b7825c406294 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 680.126636] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-533425cd-7c6f-4416-89e6-646620552147 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.133772] env[62522]: DEBUG oslo_vmware.api [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Waiting for the task: (returnval){ [ 680.133772] env[62522]: value = "task-2415107" [ 680.133772] env[62522]: _type = "Task" [ 680.133772] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.141560] env[62522]: DEBUG oslo_vmware.api [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415107, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.298781] env[62522]: DEBUG nova.compute.manager [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 680.324291] env[62522]: DEBUG nova.virt.hardware [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 680.324291] env[62522]: DEBUG nova.virt.hardware [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 680.324291] env[62522]: DEBUG nova.virt.hardware [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 680.324449] env[62522]: DEBUG nova.virt.hardware [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 680.324449] env[62522]: DEBUG nova.virt.hardware [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 680.324449] env[62522]: DEBUG nova.virt.hardware [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 680.324449] env[62522]: DEBUG 
nova.virt.hardware [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 680.324449] env[62522]: DEBUG nova.virt.hardware [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 680.324616] env[62522]: DEBUG nova.virt.hardware [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 680.324938] env[62522]: DEBUG nova.virt.hardware [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 680.325074] env[62522]: DEBUG nova.virt.hardware [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 680.325954] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-250204b7-a070-4569-921e-ab826e4ce31b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.335172] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-918b122b-ce00-4f8c-a7c4-a42c708cc854 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.365687] env[62522]: DEBUG nova.scheduler.client.report [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 680.473212] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Releasing lock "refresh_cache-7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 680.473212] env[62522]: DEBUG nova.compute.manager [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 
tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Instance network_info: |[{"id": "d2781fca-06c0-403d-8704-705de755c0a0", "address": "fa:16:3e:74:71:d5", "network": {"id": "2d13676e-cb66-4166-b85f-b3c122b27f67", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1154652116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dae444f2b5845aa9264fea1f237f0e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2781fca-06", "ovs_interfaceid": "d2781fca-06c0-403d-8704-705de755c0a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 680.476359] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:71:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa09e855-8af1-419b-b78d-8ffcc94b1bfb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd2781fca-06c0-403d-8704-705de755c0a0', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 680.484942] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Creating folder: Project (0dae444f2b5845aa9264fea1f237f0e3). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 680.485652] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-50ef8ec1-714c-47ca-9c0f-0fdad19ed5fe {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.498913] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Created folder: Project (0dae444f2b5845aa9264fea1f237f0e3) in parent group-v489562. [ 680.499108] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Creating folder: Instances. Parent ref: group-v489612. 
{{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 680.499379] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8f1e295b-b86c-4f2d-9cb3-1ce5c4374226 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.502911] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1214070c-1d66-477b-8084-c4c10d15673d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Lock "4de70165-c28f-44b7-a01a-caa0787170b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.475s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.512884] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Created folder: Instances in parent group-v489612. [ 680.513937] env[62522]: DEBUG oslo.service.loopingcall [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 680.513937] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 680.513937] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f8c516a7-93d2-4e3d-8800-13f14cdb10dc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.541565] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 680.541565] env[62522]: value = "task-2415110" [ 680.541565] env[62522]: _type = "Task" [ 680.541565] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.550558] env[62522]: DEBUG oslo_vmware.api [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415104, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.555717] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415110, 'name': CreateVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.649151] env[62522]: DEBUG oslo_vmware.api [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Task: {'id': task-2415107, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151182} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.649450] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 680.650043] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 680.650043] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 680.650043] env[62522]: INFO nova.compute.manager [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Took 1.09 seconds to destroy the instance on the hypervisor. [ 680.650293] env[62522]: DEBUG oslo.service.loopingcall [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 680.650480] env[62522]: DEBUG nova.compute.manager [-] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 680.650571] env[62522]: DEBUG nova.network.neutron [-] [instance: 9a098809-cc26-4210-b09e-b7825c406294] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 680.675229] env[62522]: DEBUG nova.network.neutron [-] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 680.701851] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "d30397b4-c617-4717-b624-ad1b06331bea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 680.702102] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "d30397b4-c617-4717-b624-ad1b06331bea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 680.871535] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.610s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.872096] env[62522]: DEBUG nova.compute.manager [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 680.874821] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.863s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 680.875037] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.878701] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.378s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 680.880611] env[62522]: INFO nova.compute.claims [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 680.903769] env[62522]: INFO nova.scheduler.client.report [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f 
tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Deleted allocations for instance 678b6b5f-b410-4c55-872e-4a74da6d7ebc [ 681.010498] env[62522]: DEBUG nova.compute.manager [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 681.013765] env[62522]: DEBUG nova.compute.manager [req-8f6f29ab-b2f5-4beb-b372-432c07a2c9f6 req-fcb710ff-df42-4995-8b7e-91e29dcc509a service nova] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Received event network-changed-d2781fca-06c0-403d-8704-705de755c0a0 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 681.014832] env[62522]: DEBUG nova.compute.manager [req-8f6f29ab-b2f5-4beb-b372-432c07a2c9f6 req-fcb710ff-df42-4995-8b7e-91e29dcc509a service nova] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Refreshing instance network info cache due to event network-changed-d2781fca-06c0-403d-8704-705de755c0a0. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 681.015332] env[62522]: DEBUG oslo_concurrency.lockutils [req-8f6f29ab-b2f5-4beb-b372-432c07a2c9f6 req-fcb710ff-df42-4995-8b7e-91e29dcc509a service nova] Acquiring lock "refresh_cache-7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.017072] env[62522]: DEBUG oslo_concurrency.lockutils [req-8f6f29ab-b2f5-4beb-b372-432c07a2c9f6 req-fcb710ff-df42-4995-8b7e-91e29dcc509a service nova] Acquired lock "refresh_cache-7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.017072] env[62522]: DEBUG nova.network.neutron [req-8f6f29ab-b2f5-4beb-b372-432c07a2c9f6 req-fcb710ff-df42-4995-8b7e-91e29dcc509a service nova] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Refreshing network info cache for port d2781fca-06c0-403d-8704-705de755c0a0 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 681.052417] env[62522]: DEBUG oslo_vmware.api [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415104, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.057845] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415110, 'name': CreateVM_Task, 'duration_secs': 0.411817} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.058047] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 681.058808] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.058976] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.059344] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 681.059613] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69f5b877-5d37-4d6b-900a-f4342b83069c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.067645] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for the task: (returnval){ [ 681.067645] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52375f45-72c2-557e-8f38-672f4fbe97f3" [ 681.067645] env[62522]: _type = "Task" [ 681.067645] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.080385] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52375f45-72c2-557e-8f38-672f4fbe97f3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.179698] env[62522]: DEBUG nova.network.neutron [-] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.294126] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "0d36b844-554e-46e7-9cf9-ef04b67e8898" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.294126] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "0d36b844-554e-46e7-9cf9-ef04b67e8898" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 681.386570] env[62522]: DEBUG nova.compute.utils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 681.390894] env[62522]: DEBUG nova.compute.manager [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 681.392975] env[62522]: DEBUG nova.network.neutron [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 681.413674] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bb32ad7a-c1e4-4ac7-9d2a-960491da577f tempest-ServerExternalEventsTest-202892512 tempest-ServerExternalEventsTest-202892512-project-member] Lock "678b6b5f-b410-4c55-872e-4a74da6d7ebc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.291s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.489781] env[62522]: DEBUG nova.policy [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e3ce17ca2f0d457c8768549d66b1400a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e47f8c538134439d8405e2825ad0af22', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 681.539960] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.552464] env[62522]: DEBUG oslo_vmware.api [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415104, 'name': PowerOnVM_Task, 'duration_secs': 1.554586} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.553730] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 681.554058] env[62522]: INFO nova.compute.manager [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Took 6.54 seconds to spawn the instance on the hypervisor. 
[ 681.554358] env[62522]: DEBUG nova.compute.manager [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 681.555802] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a20d783d-a5d0-4647-8d55-5a531d80f0fd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.587796] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52375f45-72c2-557e-8f38-672f4fbe97f3, 'name': SearchDatastore_Task, 'duration_secs': 0.02412} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.587796] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 681.591308] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 681.591711] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.591797] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.592071] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 681.592313] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80352bb5-ea22-4e64-883b-46442691f804 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.602483] env[62522]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 681.602692] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 681.604170] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e786d7b6-4eb8-4dc6-973f-7590a1452630 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.610070] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for the task: (returnval){ [ 681.610070] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]528d335f-20f4-f8f5-e12b-70cda48e844a" [ 681.610070] env[62522]: _type = "Task" [ 681.610070] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.623881] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]528d335f-20f4-f8f5-e12b-70cda48e844a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.628288] env[62522]: DEBUG nova.network.neutron [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Successfully updated port: 7d31f9c2-3052-4e8a-b932-8aa226e03b49 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 681.683824] env[62522]: INFO nova.compute.manager [-] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Took 1.03 seconds to deallocate network for instance. [ 681.892126] env[62522]: DEBUG nova.compute.manager [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 682.086830] env[62522]: DEBUG nova.network.neutron [req-8f6f29ab-b2f5-4beb-b372-432c07a2c9f6 req-fcb710ff-df42-4995-8b7e-91e29dcc509a service nova] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Updated VIF entry in instance network info cache for port d2781fca-06c0-403d-8704-705de755c0a0. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 682.087762] env[62522]: DEBUG nova.network.neutron [req-8f6f29ab-b2f5-4beb-b372-432c07a2c9f6 req-fcb710ff-df42-4995-8b7e-91e29dcc509a service nova] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Updating instance_info_cache with network_info: [{"id": "d2781fca-06c0-403d-8704-705de755c0a0", "address": "fa:16:3e:74:71:d5", "network": {"id": "2d13676e-cb66-4166-b85f-b3c122b27f67", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1154652116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dae444f2b5845aa9264fea1f237f0e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2781fca-06", "ovs_interfaceid": "d2781fca-06c0-403d-8704-705de755c0a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.092380] env[62522]: INFO nova.compute.manager [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Took 36.17 seconds to build instance. [ 682.123938] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]528d335f-20f4-f8f5-e12b-70cda48e844a, 'name': SearchDatastore_Task, 'duration_secs': 0.028819} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.130061] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb79b2ce-f557-488b-b692-8afc15d79563 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.144158] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "refresh_cache-6d8b5429-113b-4280-9851-bf6614dde4a7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 682.144158] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquired lock "refresh_cache-6d8b5429-113b-4280-9851-bf6614dde4a7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.144286] env[62522]: DEBUG nova.network.neutron [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 682.153287] env[62522]: DEBUG oslo_concurrency.lockutils [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "d6935c9b-e4cc-47ed-96d5-e485d60382d6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.153287] env[62522]: DEBUG oslo_concurrency.lockutils [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "d6935c9b-e4cc-47ed-96d5-e485d60382d6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.158512] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for the task: (returnval){ [ 682.158512] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529b544f-22a2-26d6-5b3a-80cb678ffab6" [ 682.158512] env[62522]: _type = "Task" [ 682.158512] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.168756] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529b544f-22a2-26d6-5b3a-80cb678ffab6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.191238] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.281250] env[62522]: DEBUG nova.network.neutron [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Successfully created port: 80d82c33-bbd9-41b4-ba21-705502101cf8 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 682.491196] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a21122-4a0b-41c2-94c0-21617f70d5c3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.505809] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fccc0ca-76a1-4a0d-8ca4-8007cf44b60a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.547501] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e15771-456b-4de9-9802-68868e7c8841 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.553709] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Acquiring lock "4de70165-c28f-44b7-a01a-caa0787170b8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.553944] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Lock "4de70165-c28f-44b7-a01a-caa0787170b8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.554153] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Acquiring lock "4de70165-c28f-44b7-a01a-caa0787170b8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.554501] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Lock "4de70165-c28f-44b7-a01a-caa0787170b8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.554501] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Lock "4de70165-c28f-44b7-a01a-caa0787170b8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 682.557116] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-852f25ba-e3bb-4cd4-86fb-06b11d111981 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.562704] env[62522]: INFO nova.compute.manager [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Terminating instance [ 682.573990] env[62522]: DEBUG nova.compute.provider_tree [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 682.593073] env[62522]: DEBUG oslo_concurrency.lockutils [req-8f6f29ab-b2f5-4beb-b372-432c07a2c9f6 req-fcb710ff-df42-4995-8b7e-91e29dcc509a service nova] Releasing lock "refresh_cache-7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.594767] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2d59854-6756-4791-b0b6-610b1d930159 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Lock "c73686c6-4dd8-4f00-a65a-5d8574409ad1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.760s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 682.666685] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529b544f-22a2-26d6-5b3a-80cb678ffab6, 'name': SearchDatastore_Task, 'duration_secs': 0.011735} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.666946] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.667040] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a/7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 682.667284] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dbbda627-0c2a-40fe-b556-bca496a83d28 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.675529] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for the task: (returnval){ [ 682.675529] env[62522]: value = "task-2415111" [ 682.675529] env[62522]: _type = "Task" [ 682.675529] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.685142] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415111, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.716156] env[62522]: DEBUG nova.network.neutron [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 682.901950] env[62522]: DEBUG nova.compute.manager [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 682.926762] env[62522]: DEBUG nova.virt.hardware [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 682.927026] env[62522]: DEBUG nova.virt.hardware [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 682.927190] env[62522]: DEBUG nova.virt.hardware [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 682.927374] env[62522]: DEBUG nova.virt.hardware [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 682.927519] env[62522]: DEBUG nova.virt.hardware [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 682.927664] env[62522]: DEBUG nova.virt.hardware [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 682.927871] env[62522]: DEBUG nova.virt.hardware [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 682.928039] env[62522]: DEBUG nova.virt.hardware [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 682.928272] env[62522]: DEBUG nova.virt.hardware [None 
req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 682.928471] env[62522]: DEBUG nova.virt.hardware [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 682.928657] env[62522]: DEBUG nova.virt.hardware [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 682.929568] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe1bc08-dad4-4edb-a328-986def5cedd3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.938103] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39aff98a-f025-4cad-a1c7-fb40c8c7d43d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.076092] env[62522]: DEBUG nova.compute.manager [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 683.076480] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 683.077461] env[62522]: DEBUG nova.scheduler.client.report [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 683.084526] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc915c85-6cfd-4169-9cad-5ce8572b5103 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.093064] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 683.093396] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-637c3cfb-c7f3-405d-9715-6f47d0e8bb2a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.097396] env[62522]: DEBUG nova.compute.manager [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 683.101203] env[62522]: DEBUG oslo_vmware.api [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Waiting for the task: (returnval){ [ 683.101203] env[62522]: value = "task-2415112" [ 683.101203] env[62522]: _type = "Task" [ 683.101203] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.110317] env[62522]: DEBUG oslo_vmware.api [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Task: {'id': task-2415112, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.114296] env[62522]: DEBUG nova.network.neutron [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Updating instance_info_cache with network_info: [{"id": "7d31f9c2-3052-4e8a-b932-8aa226e03b49", "address": "fa:16:3e:cd:a2:bd", "network": {"id": "6cb7cdaa-cbcd-4565-a222-310242ff25b1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-684499993-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e47f8c538134439d8405e2825ad0af22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0746f464-a938-427b-ba02-600449df5070", "external-id": "nsx-vlan-transportzone-881", "segmentation_id": 881, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d31f9c2-30", "ovs_interfaceid": "7d31f9c2-3052-4e8a-b932-8aa226e03b49", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.186728] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415111, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.306619] env[62522]: INFO nova.compute.manager [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Rebuilding instance [ 683.385605] env[62522]: DEBUG nova.compute.manager [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 683.387772] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9808994-7acb-4999-a5e1-0796033f8eda {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.580069] env[62522]: DEBUG nova.compute.manager [req-b746e120-67de-429d-83a2-d108209ad164 req-13929574-fb71-4e63-856b-4a0be6845524 service nova] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Received event network-vif-plugged-7d31f9c2-3052-4e8a-b932-8aa226e03b49 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 683.580299] env[62522]: DEBUG oslo_concurrency.lockutils [req-b746e120-67de-429d-83a2-d108209ad164 req-13929574-fb71-4e63-856b-4a0be6845524 service nova] Acquiring lock "6d8b5429-113b-4280-9851-bf6614dde4a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 683.580515] env[62522]: DEBUG oslo_concurrency.lockutils [req-b746e120-67de-429d-83a2-d108209ad164 req-13929574-fb71-4e63-856b-4a0be6845524 service nova] Lock "6d8b5429-113b-4280-9851-bf6614dde4a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.580708] env[62522]: DEBUG oslo_concurrency.lockutils [req-b746e120-67de-429d-83a2-d108209ad164 req-13929574-fb71-4e63-856b-4a0be6845524 service nova] Lock "6d8b5429-113b-4280-9851-bf6614dde4a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.580881] env[62522]: DEBUG nova.compute.manager [req-b746e120-67de-429d-83a2-d108209ad164 req-13929574-fb71-4e63-856b-4a0be6845524 service nova] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] No waiting events found dispatching network-vif-plugged-7d31f9c2-3052-4e8a-b932-8aa226e03b49 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 683.581781] env[62522]: WARNING nova.compute.manager [req-b746e120-67de-429d-83a2-d108209ad164 req-13929574-fb71-4e63-856b-4a0be6845524 service nova] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Received unexpected event network-vif-plugged-7d31f9c2-3052-4e8a-b932-8aa226e03b49 for instance with vm_state building and task_state spawning. 
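The CopyVirtualDisk_Task and PowerOffVM_Task entries above follow the oslo.vmware wait_for_task pattern: the driver submits a vSphere task, then polls it, logging "progress is N%" until the task reports success (with a duration_secs) or an error. Below is a minimal, self-contained sketch of that poll loop; the get_task_info callable, its TaskInfo-like return object, and the 0.5-second interval are illustrative assumptions only, not the actual oslo_vmware/api.py implementation referenced in the log.

import time

POLL_INTERVAL = 0.5  # assumed; oslo.vmware's task_poll_interval is configurable

def wait_for_task(get_task_info, timeout=300):
    # get_task_info is assumed to return an object with .state ('running',
    # 'success' or 'error'), .progress and .error, mirroring the TaskInfo
    # fields implied by the "progress is N%" / "completed successfully" lines.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError("task failed: %s" % info.error)
        print("progress is %s%%" % (info.progress or 0))
        time.sleep(POLL_INTERVAL)
    raise TimeoutError("task did not complete in time")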
[ 683.582275] env[62522]: DEBUG nova.compute.manager [req-b746e120-67de-429d-83a2-d108209ad164 req-13929574-fb71-4e63-856b-4a0be6845524 service nova] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Received event network-changed-7d31f9c2-3052-4e8a-b932-8aa226e03b49 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 683.582275] env[62522]: DEBUG nova.compute.manager [req-b746e120-67de-429d-83a2-d108209ad164 req-13929574-fb71-4e63-856b-4a0be6845524 service nova] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Refreshing instance network info cache due to event network-changed-7d31f9c2-3052-4e8a-b932-8aa226e03b49. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 683.582347] env[62522]: DEBUG oslo_concurrency.lockutils [req-b746e120-67de-429d-83a2-d108209ad164 req-13929574-fb71-4e63-856b-4a0be6845524 service nova] Acquiring lock "refresh_cache-6d8b5429-113b-4280-9851-bf6614dde4a7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 683.587689] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.710s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.588218] env[62522]: DEBUG nova.compute.manager [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 683.590615] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.678s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.594127] env[62522]: INFO nova.compute.claims [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 683.630021] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Releasing lock "refresh_cache-6d8b5429-113b-4280-9851-bf6614dde4a7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 683.630021] env[62522]: DEBUG nova.compute.manager [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Instance network_info: |[{"id": "7d31f9c2-3052-4e8a-b932-8aa226e03b49", "address": "fa:16:3e:cd:a2:bd", "network": {"id": "6cb7cdaa-cbcd-4565-a222-310242ff25b1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-684499993-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e47f8c538134439d8405e2825ad0af22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0746f464-a938-427b-ba02-600449df5070", "external-id": "nsx-vlan-transportzone-881", "segmentation_id": 881, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d31f9c2-30", "ovs_interfaceid": "7d31f9c2-3052-4e8a-b932-8aa226e03b49", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 683.630488] env[62522]: DEBUG oslo_vmware.api [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Task: {'id': task-2415112, 'name': PowerOffVM_Task, 'duration_secs': 0.313309} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.630488] env[62522]: DEBUG oslo_concurrency.lockutils [req-b746e120-67de-429d-83a2-d108209ad164 req-13929574-fb71-4e63-856b-4a0be6845524 service nova] Acquired lock "refresh_cache-6d8b5429-113b-4280-9851-bf6614dde4a7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.630488] env[62522]: DEBUG nova.network.neutron [req-b746e120-67de-429d-83a2-d108209ad164 req-13929574-fb71-4e63-856b-4a0be6845524 service nova] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Refreshing network info cache for port 7d31f9c2-3052-4e8a-b932-8aa226e03b49 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 683.630628] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:a2:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0746f464-a938-427b-ba02-600449df5070', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7d31f9c2-3052-4e8a-b932-8aa226e03b49', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 683.642113] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Creating folder: Project (e47f8c538134439d8405e2825ad0af22). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 683.642503] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 683.642711] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 683.643191] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fc7927d1-36e4-4137-aefa-ba02272fc18d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.645403] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d3b2b3ad-6ada-4da7-8578-22516020558b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.648207] env[62522]: DEBUG oslo_concurrency.lockutils [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 683.661060] env[62522]: INFO 
nova.virt.vmwareapi.vm_util [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Created folder: Project (e47f8c538134439d8405e2825ad0af22) in parent group-v489562. [ 683.661060] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Creating folder: Instances. Parent ref: group-v489615. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 683.661060] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-20abf99b-cf6a-4f75-8375-bc1db7f1f30b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.670141] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Created folder: Instances in parent group-v489615. [ 683.670385] env[62522]: DEBUG oslo.service.loopingcall [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 683.670578] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 683.672227] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f91b5165-6b1c-4ab5-85fa-d7e9a92a79e9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.698113] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415111, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.835023} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.699530] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a/7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 683.699762] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 683.700007] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 683.700007] env[62522]: value = "task-2415116" [ 683.700007] env[62522]: _type = "Task" [ 683.700007] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.700196] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5feefdf6-04bb-4f8c-8edf-54e34c12a1ae {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.712178] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415116, 'name': CreateVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.714743] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for the task: (returnval){ [ 683.714743] env[62522]: value = "task-2415117" [ 683.714743] env[62522]: _type = "Task" [ 683.714743] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.714953] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 683.715158] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 683.715358] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Deleting the datastore file [datastore1] 4de70165-c28f-44b7-a01a-caa0787170b8 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 683.715843] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-921e09b2-7f78-4ee9-889c-c17d03fc3988 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.730449] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415117, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.732184] env[62522]: DEBUG oslo_vmware.api [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Waiting for the task: (returnval){ [ 683.732184] env[62522]: value = "task-2415118" [ 683.732184] env[62522]: _type = "Task" [ 683.732184] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.743600] env[62522]: DEBUG oslo_vmware.api [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Task: {'id': task-2415118, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.101034] env[62522]: DEBUG nova.compute.utils [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 684.105921] env[62522]: DEBUG nova.compute.manager [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 684.105921] env[62522]: DEBUG nova.network.neutron [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 684.158143] env[62522]: DEBUG nova.network.neutron [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Successfully updated port: 80d82c33-bbd9-41b4-ba21-705502101cf8 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 684.843356] env[62522]: DEBUG nova.policy [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '40504d2538e34ec2b02cc43b616aafbd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91dee2b9e8bd456cbb55667383b0058d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 684.847575] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 684.850244] env[62522]: DEBUG nova.compute.manager [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 684.855019] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "refresh_cache-68b4c229-0ace-486f-9a99-d3c955b7bdfb" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 684.855019] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquired lock "refresh_cache-68b4c229-0ace-486f-9a99-d3c955b7bdfb" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.855019] env[62522]: DEBUG nova.network.neutron [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 684.855704] env[62522]: DEBUG nova.compute.manager [req-aa8f7ffc-fe49-4fee-ab7f-b15e7b110125 req-6b6575a6-c381-4268-be83-24997f323795 service nova] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Received event network-vif-plugged-80d82c33-bbd9-41b4-ba21-705502101cf8 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 684.855861] env[62522]: DEBUG oslo_concurrency.lockutils [req-aa8f7ffc-fe49-4fee-ab7f-b15e7b110125 req-6b6575a6-c381-4268-be83-24997f323795 service nova] Acquiring lock "68b4c229-0ace-486f-9a99-d3c955b7bdfb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.856057] env[62522]: DEBUG oslo_concurrency.lockutils [req-aa8f7ffc-fe49-4fee-ab7f-b15e7b110125 req-6b6575a6-c381-4268-be83-24997f323795 service nova] Lock "68b4c229-0ace-486f-9a99-d3c955b7bdfb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 684.856235] env[62522]: DEBUG oslo_concurrency.lockutils [req-aa8f7ffc-fe49-4fee-ab7f-b15e7b110125 req-6b6575a6-c381-4268-be83-24997f323795 service nova] Lock "68b4c229-0ace-486f-9a99-d3c955b7bdfb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 684.856408] env[62522]: DEBUG nova.compute.manager [req-aa8f7ffc-fe49-4fee-ab7f-b15e7b110125 req-6b6575a6-c381-4268-be83-24997f323795 service nova] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] No waiting events found dispatching network-vif-plugged-80d82c33-bbd9-41b4-ba21-705502101cf8 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 684.856566] env[62522]: WARNING nova.compute.manager [req-aa8f7ffc-fe49-4fee-ab7f-b15e7b110125 req-6b6575a6-c381-4268-be83-24997f323795 service nova] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Received unexpected event network-vif-plugged-80d82c33-bbd9-41b4-ba21-705502101cf8 for instance with vm_state building and task_state spawning. 
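The "Acquiring lock" / "acquired" / "released" triplets above (for example the per-instance "refresh_cache-<uuid>" lock and the "compute_resources" lock) are emitted by oslo.concurrency's lockutils, which records how long each caller waited for and then held a named lock. The sketch below only illustrates that usage pattern, assuming oslo.concurrency is installed; the lock names are taken from the log, but the function bodies are hypothetical placeholders, not Nova's actual code.

from oslo_concurrency import lockutils

def refresh_network_cache(instance_uuid):
    # Context-manager form: corresponds to the plain Acquiring/Acquired/
    # Releasing lines (lockutils.py lock()) seen above.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        pass  # hypothetical: rebuild the instance's network info cache here

@lockutils.synchronized("compute_resources")
def update_resource_usage():
    # Decorator form: corresponds to the "waited N.NNNs" / "held N.NNNs"
    # timing lines (lockutils.py inner()) seen above.
    pass  # hypothetical: resource-tracker bookkeeping guarded by the lock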
[ 684.865316] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c5cfbfcd-8ec2-43fd-9a57-5e4d24668a37 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.872069] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415116, 'name': CreateVM_Task, 'duration_secs': 0.481288} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.874066] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 684.880078] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 684.880260] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.880595] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 684.881475] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415117, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070997} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.881475] env[62522]: DEBUG oslo_vmware.api [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Task: {'id': task-2415118, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.231094} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.881940] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e22f0236-700a-4df5-851c-082e172b41bb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.883671] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 684.883970] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 684.884163] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 684.888017] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 684.888017] env[62522]: INFO nova.compute.manager [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Took 1.81 seconds to destroy the instance on the hypervisor. [ 684.888017] env[62522]: DEBUG oslo.service.loopingcall [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 684.888017] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29242acc-7738-4923-9e9b-52f4e5d19be0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.888403] env[62522]: DEBUG nova.compute.manager [-] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 684.888510] env[62522]: DEBUG nova.network.neutron [-] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 684.891779] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Waiting for the task: (returnval){ [ 684.891779] env[62522]: value = "task-2415119" [ 684.891779] env[62522]: _type = "Task" [ 684.891779] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.897561] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 684.897561] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5221092d-19b5-f48b-0679-ccb875fef826" [ 684.897561] env[62522]: _type = "Task" [ 684.897561] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.920192] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Reconfiguring VM instance instance-00000010 to attach disk [datastore2] 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a/7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 684.924248] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c1b03fa-704a-41b7-b591-f8505ac01c10 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.941889] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415119, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.950344] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5221092d-19b5-f48b-0679-ccb875fef826, 'name': SearchDatastore_Task, 'duration_secs': 0.020256} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.951668] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 684.951909] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 684.952146] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 684.952295] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.952473] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 684.952803] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for the task: (returnval){ [ 684.952803] env[62522]: value = "task-2415120" [ 684.952803] env[62522]: _type = "Task" [ 684.952803] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.952997] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-682e8e55-97a8-4006-830e-2698667b5078 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.963010] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415120, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.968988] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 684.969191] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 684.969908] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f66f2373-43b4-4bb8-a5fd-0f15e98bbe23 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.975132] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 684.975132] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d0e822-e12d-811b-9b3a-b41454e1626d" [ 684.975132] env[62522]: _type = "Task" [ 684.975132] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.982995] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d0e822-e12d-811b-9b3a-b41454e1626d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.409447] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415119, 'name': PowerOffVM_Task, 'duration_secs': 0.132708} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.411241] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 685.411241] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 685.412031] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95415c56-d46b-4159-90a3-6eda4449bcf9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.415814] env[62522]: DEBUG nova.network.neutron [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 685.421076] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 685.423528] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-553c586c-d2a2-47f7-9907-47498987d077 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.449741] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 685.449958] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 685.450159] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Deleting the datastore file [datastore1] c73686c6-4dd8-4f00-a65a-5d8574409ad1 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 685.450742] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac81deed-606e-4ce7-ac8d-11b33bb9da35 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.462289] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 
tempest-ServerShowV257Test-950868892-project-member] Waiting for the task: (returnval){ [ 685.462289] env[62522]: value = "task-2415122" [ 685.462289] env[62522]: _type = "Task" [ 685.462289] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.472147] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415120, 'name': ReconfigVM_Task, 'duration_secs': 0.286702} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.472831] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Reconfigured VM instance instance-00000010 to attach disk [datastore2] 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a/7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 685.476097] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f714169d-2fee-4f2e-8443-1efb2694fcad {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.486713] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415122, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.492815] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for the task: (returnval){ [ 685.492815] env[62522]: value = "task-2415123" [ 685.492815] env[62522]: _type = "Task" [ 685.492815] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.496057] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d0e822-e12d-811b-9b3a-b41454e1626d, 'name': SearchDatastore_Task, 'duration_secs': 0.013439} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.499738] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f6cc553-4927-4468-9661-938c28dd1c75 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.505569] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-866cbb94-b58a-416a-9e88-49f11b581e79 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.511030] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415123, 'name': Rename_Task} progress is 10%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.513371] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 685.513371] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5269f4e0-b3b2-3180-a8b9-27ef2fefb48a" [ 685.513371] env[62522]: _type = "Task" [ 685.513371] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.518995] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-184dba69-f7cf-466e-aeee-47e404cb47ca {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.524655] env[62522]: DEBUG nova.network.neutron [req-b746e120-67de-429d-83a2-d108209ad164 req-13929574-fb71-4e63-856b-4a0be6845524 service nova] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Updated VIF entry in instance network info cache for port 7d31f9c2-3052-4e8a-b932-8aa226e03b49. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 685.525009] env[62522]: DEBUG nova.network.neutron [req-b746e120-67de-429d-83a2-d108209ad164 req-13929574-fb71-4e63-856b-4a0be6845524 service nova] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Updating instance_info_cache with network_info: [{"id": "7d31f9c2-3052-4e8a-b932-8aa226e03b49", "address": "fa:16:3e:cd:a2:bd", "network": {"id": "6cb7cdaa-cbcd-4565-a222-310242ff25b1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-684499993-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e47f8c538134439d8405e2825ad0af22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0746f464-a938-427b-ba02-600449df5070", "external-id": "nsx-vlan-transportzone-881", "segmentation_id": 881, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d31f9c2-30", "ovs_interfaceid": "7d31f9c2-3052-4e8a-b932-8aa226e03b49", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.534318] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5269f4e0-b3b2-3180-a8b9-27ef2fefb48a, 'name': SearchDatastore_Task, 'duration_secs': 0.009694} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.558747] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 685.559057] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 6d8b5429-113b-4280-9851-bf6614dde4a7/6d8b5429-113b-4280-9851-bf6614dde4a7.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 685.560771] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-21d66bb0-9851-4587-8118-f7da099eebb7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.563118] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac66e90-f2e4-4765-a716-023accb70ca5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.573712] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-984a5988-be1e-4538-bc60-8332a4452767 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.577732] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 685.577732] env[62522]: value = "task-2415124" [ 685.577732] env[62522]: _type = "Task" [ 685.577732] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.591943] env[62522]: DEBUG nova.compute.provider_tree [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 685.598616] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415124, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.633913] env[62522]: DEBUG nova.network.neutron [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Updating instance_info_cache with network_info: [{"id": "80d82c33-bbd9-41b4-ba21-705502101cf8", "address": "fa:16:3e:10:89:a2", "network": {"id": "6cb7cdaa-cbcd-4565-a222-310242ff25b1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-684499993-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e47f8c538134439d8405e2825ad0af22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0746f464-a938-427b-ba02-600449df5070", "external-id": "nsx-vlan-transportzone-881", "segmentation_id": 881, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80d82c33-bb", "ovs_interfaceid": "80d82c33-bbd9-41b4-ba21-705502101cf8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.704472] env[62522]: DEBUG oslo_concurrency.lockutils [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "ee1c638b-1f38-4e21-9369-4d4ff2e13d46" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 685.704934] env[62522]: DEBUG oslo_concurrency.lockutils [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "ee1c638b-1f38-4e21-9369-4d4ff2e13d46" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 685.705217] env[62522]: DEBUG nova.network.neutron [-] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.860406] env[62522]: DEBUG nova.compute.manager [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 685.905667] env[62522]: DEBUG nova.network.neutron [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Successfully created port: cd619060-5655-434c-967f-7552adca021b {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 685.913281] env[62522]: DEBUG nova.virt.hardware [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 685.914209] env[62522]: DEBUG nova.virt.hardware [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 685.914209] env[62522]: DEBUG nova.virt.hardware [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 685.914209] env[62522]: DEBUG nova.virt.hardware [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 685.914209] env[62522]: DEBUG nova.virt.hardware [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 685.914209] env[62522]: DEBUG nova.virt.hardware [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 685.915214] env[62522]: DEBUG nova.virt.hardware [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 685.915214] env[62522]: DEBUG 
nova.virt.hardware [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 685.915214] env[62522]: DEBUG nova.virt.hardware [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 685.915214] env[62522]: DEBUG nova.virt.hardware [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 685.915214] env[62522]: DEBUG nova.virt.hardware [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 685.918728] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca10ecbe-17bc-4c89-b0c1-4f76bc3f2ff8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.925070] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9c6e519-ff4d-4067-ace8-8f24bcd85be6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.973184] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415122, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.098614} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.973462] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 685.973641] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 685.973812] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 686.007094] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415123, 'name': Rename_Task, 'duration_secs': 0.133873} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.007398] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 686.007636] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45613ab2-6b38-4e17-a63e-fb4d0992d4d6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.014667] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for the task: (returnval){ [ 686.014667] env[62522]: value = "task-2415125" [ 686.014667] env[62522]: _type = "Task" [ 686.014667] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.025355] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415125, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.029511] env[62522]: DEBUG oslo_concurrency.lockutils [req-b746e120-67de-429d-83a2-d108209ad164 req-13929574-fb71-4e63-856b-4a0be6845524 service nova] Releasing lock "refresh_cache-6d8b5429-113b-4280-9851-bf6614dde4a7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 686.088117] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415124, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.454838} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.088415] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 6d8b5429-113b-4280-9851-bf6614dde4a7/6d8b5429-113b-4280-9851-bf6614dde4a7.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 686.088628] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 686.088875] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-af4f5685-91b4-4227-95ba-ef6958d843d6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.095783] env[62522]: DEBUG nova.scheduler.client.report [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 686.100210] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 686.100210] env[62522]: value = "task-2415126" [ 686.100210] env[62522]: _type = "Task" [ 686.100210] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.110622] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415126, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.136989] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Releasing lock "refresh_cache-68b4c229-0ace-486f-9a99-d3c955b7bdfb" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 686.137492] env[62522]: DEBUG nova.compute.manager [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Instance network_info: |[{"id": "80d82c33-bbd9-41b4-ba21-705502101cf8", "address": "fa:16:3e:10:89:a2", "network": {"id": "6cb7cdaa-cbcd-4565-a222-310242ff25b1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-684499993-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e47f8c538134439d8405e2825ad0af22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0746f464-a938-427b-ba02-600449df5070", "external-id": "nsx-vlan-transportzone-881", "segmentation_id": 881, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80d82c33-bb", "ovs_interfaceid": "80d82c33-bbd9-41b4-ba21-705502101cf8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 686.138023] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:10:89:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0746f464-a938-427b-ba02-600449df5070', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '80d82c33-bbd9-41b4-ba21-705502101cf8', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 686.146357] env[62522]: DEBUG oslo.service.loopingcall [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 686.146660] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 686.147016] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47f4eb4f-237a-4afc-a36e-c8eee879afde {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.170730] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 686.170730] env[62522]: value = "task-2415127" [ 686.170730] env[62522]: _type = "Task" [ 686.170730] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.178759] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415127, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.209047] env[62522]: INFO nova.compute.manager [-] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Took 1.32 seconds to deallocate network for instance. [ 686.526365] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415125, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.618440] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.028s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 686.619040] env[62522]: DEBUG nova.compute.manager [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 686.622545] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415126, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069753} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.623243] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.589s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 686.623243] env[62522]: DEBUG nova.objects.instance [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Lazy-loading 'resources' on Instance uuid a5657a70-5374-4d52-be9a-2d05f9556d16 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 686.624499] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 686.626812] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcccd1e8-2ddd-4770-8c14-0e1a8afb4942 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.654847] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Reconfiguring VM instance instance-00000012 to attach disk [datastore2] 6d8b5429-113b-4280-9851-bf6614dde4a7/6d8b5429-113b-4280-9851-bf6614dde4a7.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 686.656438] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54c96b12-79b1-4661-bc26-2affe660264c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.683113] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415127, 'name': CreateVM_Task, 'duration_secs': 0.392518} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.684708] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 686.685329] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 686.685329] env[62522]: value = "task-2415128" [ 686.685329] env[62522]: _type = "Task" [ 686.685329] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.686304] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 686.686367] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.686678] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 686.687024] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d9141ed-3fea-41b7-8dff-f8d1d2c4dee7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.695676] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 686.695676] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fb705e-e598-49b0-74c8-4f41962fbad7" [ 686.695676] env[62522]: _type = "Task" [ 686.695676] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.699356] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415128, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.708780] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fb705e-e598-49b0-74c8-4f41962fbad7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.715635] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.014062] env[62522]: DEBUG nova.virt.hardware [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 687.014062] env[62522]: DEBUG nova.virt.hardware [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 687.014062] env[62522]: DEBUG nova.virt.hardware [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 687.014062] env[62522]: DEBUG nova.virt.hardware [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 687.014307] env[62522]: DEBUG nova.virt.hardware [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 687.014724] env[62522]: DEBUG nova.virt.hardware [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 687.015140] env[62522]: DEBUG nova.virt.hardware [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
687.015923] env[62522]: DEBUG nova.virt.hardware [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 687.016373] env[62522]: DEBUG nova.virt.hardware [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 687.016686] env[62522]: DEBUG nova.virt.hardware [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 687.017013] env[62522]: DEBUG nova.virt.hardware [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 687.018023] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8dbc8d1-e0a4-4282-a58e-0235c26c8be4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.032209] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af176139-becb-4fb8-a497-dfc3c78636cb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.036091] env[62522]: DEBUG oslo_vmware.api [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415125, 'name': PowerOnVM_Task, 'duration_secs': 0.5543} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.038201] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 687.038201] env[62522]: INFO nova.compute.manager [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Took 9.38 seconds to spawn the instance on the hypervisor. 
[ 687.038201] env[62522]: DEBUG nova.compute.manager [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 687.038420] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1655f029-d25b-4ed1-b25c-d3fdf07b0684 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.054172] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Instance VIF info [] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 687.058353] env[62522]: DEBUG oslo.service.loopingcall [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 687.059350] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 687.059800] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-97c8039e-15fe-4387-8983-f660afe46286 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.080999] env[62522]: DEBUG nova.compute.manager [req-5d5a9afd-548e-485d-8eb8-93d29d73c550 req-0d833ebd-4e8f-45d7-8756-fad9053acd7e service nova] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Received event network-changed-80d82c33-bbd9-41b4-ba21-705502101cf8 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 687.081363] env[62522]: DEBUG nova.compute.manager [req-5d5a9afd-548e-485d-8eb8-93d29d73c550 req-0d833ebd-4e8f-45d7-8756-fad9053acd7e service nova] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Refreshing instance network info cache due to event network-changed-80d82c33-bbd9-41b4-ba21-705502101cf8. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 687.081682] env[62522]: DEBUG oslo_concurrency.lockutils [req-5d5a9afd-548e-485d-8eb8-93d29d73c550 req-0d833ebd-4e8f-45d7-8756-fad9053acd7e service nova] Acquiring lock "refresh_cache-68b4c229-0ace-486f-9a99-d3c955b7bdfb" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 687.082187] env[62522]: DEBUG oslo_concurrency.lockutils [req-5d5a9afd-548e-485d-8eb8-93d29d73c550 req-0d833ebd-4e8f-45d7-8756-fad9053acd7e service nova] Acquired lock "refresh_cache-68b4c229-0ace-486f-9a99-d3c955b7bdfb" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.082542] env[62522]: DEBUG nova.network.neutron [req-5d5a9afd-548e-485d-8eb8-93d29d73c550 req-0d833ebd-4e8f-45d7-8756-fad9053acd7e service nova] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Refreshing network info cache for port 80d82c33-bbd9-41b4-ba21-705502101cf8 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 687.095076] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 687.095076] env[62522]: value = "task-2415129" [ 687.095076] env[62522]: _type = "Task" [ 687.095076] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.101915] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415129, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.125122] env[62522]: DEBUG nova.compute.utils [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 687.130023] env[62522]: DEBUG nova.compute.manager [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 687.130023] env[62522]: DEBUG nova.network.neutron [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 687.190815] env[62522]: DEBUG nova.policy [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab5e5a8e6ee64aad8d52342ee3f5af36', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4bdd1f5caf09454d808bcdc15df2d3a7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 687.202490] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415128, 'name': ReconfigVM_Task, 'duration_secs': 0.265101} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.205440] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Reconfigured VM instance instance-00000012 to attach disk [datastore2] 6d8b5429-113b-4280-9851-bf6614dde4a7/6d8b5429-113b-4280-9851-bf6614dde4a7.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 687.206076] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b7ea9536-db27-46b4-98d5-88a9989e84de {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.213313] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fb705e-e598-49b0-74c8-4f41962fbad7, 'name': SearchDatastore_Task, 'duration_secs': 0.019514} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.218152] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 687.218478] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 687.218762] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 687.218919] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.219127] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 687.220148] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 687.220148] env[62522]: value = "task-2415130" [ 687.220148] env[62522]: _type = "Task" [ 687.220148] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.220629] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a07dfc6-e090-4af1-b5ad-8fe2e9fac21a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.236870] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415130, 'name': Rename_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.247269] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 687.247469] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 687.248306] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-106f4e86-55c2-4d6b-931e-0154d3c87f2d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.264853] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 687.264853] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5264a69f-135b-a919-8642-e2930f1f0b58" [ 687.264853] env[62522]: _type = "Task" [ 687.264853] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.272714] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5264a69f-135b-a919-8642-e2930f1f0b58, 'name': SearchDatastore_Task, 'duration_secs': 0.010328} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.273860] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ed48835-3f12-4b11-b2da-1486d86be82b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.281862] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 687.281862] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b5b234-8ee5-0ebc-e764-d20943e30c92" [ 687.281862] env[62522]: _type = "Task" [ 687.281862] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.292178] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b5b234-8ee5-0ebc-e764-d20943e30c92, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.448516] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "713dd924-1c96-496a-bd06-cf0235dd6f75" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.448740] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "713dd924-1c96-496a-bd06-cf0235dd6f75" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 687.605650] env[62522]: DEBUG nova.network.neutron [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Successfully created port: bb09cad6-a323-4801-8cb8-7e58b646a38e {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 687.609176] env[62522]: INFO nova.compute.manager [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Took 40.36 seconds to build instance. [ 687.616533] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415129, 'name': CreateVM_Task, 'duration_secs': 0.360378} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.616624] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 687.617166] env[62522]: DEBUG oslo_concurrency.lockutils [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 687.617402] env[62522]: DEBUG oslo_concurrency.lockutils [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.617818] env[62522]: DEBUG oslo_concurrency.lockutils [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 687.618959] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26829bfe-c2d8-44b0-aa3d-50377cfc6cf3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.629145] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Waiting for the task: (returnval){ [ 687.629145] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529e8d7f-be67-6d03-9a43-1a9469304e2c" [ 687.629145] env[62522]: _type = "Task" [ 687.629145] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.634748] env[62522]: DEBUG nova.compute.manager [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 687.651704] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529e8d7f-be67-6d03-9a43-1a9469304e2c, 'name': SearchDatastore_Task, 'duration_secs': 0.01184} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.652089] env[62522]: DEBUG oslo_concurrency.lockutils [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 687.652667] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 687.653281] env[62522]: DEBUG oslo_concurrency.lockutils [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 687.732021] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415130, 'name': Rename_Task, 'duration_secs': 0.147707} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.732021] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 687.732021] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d453a035-5ecf-4f3c-b27b-f623498ad339 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.738177] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 687.738177] env[62522]: value = "task-2415131" [ 687.738177] env[62522]: _type = "Task" [ 687.738177] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.745830] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415131, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.795176] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b5b234-8ee5-0ebc-e764-d20943e30c92, 'name': SearchDatastore_Task, 'duration_secs': 0.009847} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.795671] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 687.795937] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 68b4c229-0ace-486f-9a99-d3c955b7bdfb/68b4c229-0ace-486f-9a99-d3c955b7bdfb.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 687.796952] env[62522]: DEBUG oslo_concurrency.lockutils [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.796952] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 687.796952] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-16e1c8fc-9594-4507-9e0e-233860c1490f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.800966] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-941254ab-3226-4cd0-9e91-24280a346dec {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.805806] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 687.805806] env[62522]: value = "task-2415132" [ 687.805806] env[62522]: _type = "Task" [ 687.805806] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.811420] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec660a10-0444-4cae-aadd-e969a4f66685 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.814143] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 687.814451] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 687.815857] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-655938bc-3b74-4ad0-96e9-fa91abdc347b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.821945] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415132, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.828148] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7973afd-218d-4144-a318-76ede87636e6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.832659] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Waiting for the task: (returnval){ [ 687.832659] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ea1908-adfb-e7ce-9224-6bd45c36192c" [ 687.832659] env[62522]: _type = "Task" [ 687.832659] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.879128] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b193d14-5554-411e-971d-228d35e969d4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.888278] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ea1908-adfb-e7ce-9224-6bd45c36192c, 'name': SearchDatastore_Task, 'duration_secs': 0.022432} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.889762] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f55368a-1893-4a3a-a37a-3c36a747be8a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.895128] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f22e1a17-5bdc-4607-9f8d-774f7b2b2f6e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.900720] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Waiting for the task: (returnval){ [ 687.900720] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522bc162-028a-ebd6-c24f-b94c0ee71222" [ 687.900720] env[62522]: _type = "Task" [ 687.900720] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.911866] env[62522]: DEBUG nova.compute.provider_tree [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 687.918553] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522bc162-028a-ebd6-c24f-b94c0ee71222, 'name': SearchDatastore_Task, 'duration_secs': 0.011272} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.918707] env[62522]: DEBUG oslo_concurrency.lockutils [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 687.919024] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] c73686c6-4dd8-4f00-a65a-5d8574409ad1/c73686c6-4dd8-4f00-a65a-5d8574409ad1.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 687.919315] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-484d0385-195c-494f-b75e-95f28eae3ba7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.926959] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Waiting for the task: (returnval){ [ 687.926959] env[62522]: value = "task-2415133" [ 687.926959] env[62522]: _type = "Task" [ 687.926959] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.934958] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415133, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.111416] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7c737499-eb4b-4ada-9065-ec284e6e4ef1 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Lock "7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.244s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 688.184507] env[62522]: DEBUG nova.network.neutron [req-5d5a9afd-548e-485d-8eb8-93d29d73c550 req-0d833ebd-4e8f-45d7-8756-fad9053acd7e service nova] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Updated VIF entry in instance network info cache for port 80d82c33-bbd9-41b4-ba21-705502101cf8. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 688.184507] env[62522]: DEBUG nova.network.neutron [req-5d5a9afd-548e-485d-8eb8-93d29d73c550 req-0d833ebd-4e8f-45d7-8756-fad9053acd7e service nova] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Updating instance_info_cache with network_info: [{"id": "80d82c33-bbd9-41b4-ba21-705502101cf8", "address": "fa:16:3e:10:89:a2", "network": {"id": "6cb7cdaa-cbcd-4565-a222-310242ff25b1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-684499993-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e47f8c538134439d8405e2825ad0af22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0746f464-a938-427b-ba02-600449df5070", "external-id": "nsx-vlan-transportzone-881", "segmentation_id": 881, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80d82c33-bb", "ovs_interfaceid": "80d82c33-bbd9-41b4-ba21-705502101cf8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.255536] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415131, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.320218] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415132, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.353974] env[62522]: DEBUG nova.network.neutron [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Successfully updated port: cd619060-5655-434c-967f-7552adca021b {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 688.377046] env[62522]: DEBUG nova.compute.manager [req-d33c2b2f-d1fd-43d0-ac1d-918beac38408 req-ae1b9e67-ab2c-499f-a041-648cba51ff7b service nova] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Received event network-vif-plugged-cd619060-5655-434c-967f-7552adca021b {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 688.377046] env[62522]: DEBUG oslo_concurrency.lockutils [req-d33c2b2f-d1fd-43d0-ac1d-918beac38408 req-ae1b9e67-ab2c-499f-a041-648cba51ff7b service nova] Acquiring lock "879354d3-7423-41e2-93f6-0d8d3a120170-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 688.379209] env[62522]: DEBUG oslo_concurrency.lockutils [req-d33c2b2f-d1fd-43d0-ac1d-918beac38408 req-ae1b9e67-ab2c-499f-a041-648cba51ff7b service nova] Lock "879354d3-7423-41e2-93f6-0d8d3a120170-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 688.379209] env[62522]: DEBUG oslo_concurrency.lockutils [req-d33c2b2f-d1fd-43d0-ac1d-918beac38408 req-ae1b9e67-ab2c-499f-a041-648cba51ff7b service nova] Lock "879354d3-7423-41e2-93f6-0d8d3a120170-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 688.379209] env[62522]: DEBUG nova.compute.manager [req-d33c2b2f-d1fd-43d0-ac1d-918beac38408 req-ae1b9e67-ab2c-499f-a041-648cba51ff7b service nova] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] No waiting events found dispatching network-vif-plugged-cd619060-5655-434c-967f-7552adca021b {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 688.379209] env[62522]: WARNING nova.compute.manager [req-d33c2b2f-d1fd-43d0-ac1d-918beac38408 req-ae1b9e67-ab2c-499f-a041-648cba51ff7b service nova] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Received unexpected event network-vif-plugged-cd619060-5655-434c-967f-7552adca021b for instance with vm_state building and task_state spawning. 
[ 688.416024] env[62522]: DEBUG nova.scheduler.client.report [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 688.440051] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415133, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.617814] env[62522]: DEBUG nova.compute.manager [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 688.656658] env[62522]: DEBUG nova.compute.manager [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 688.687787] env[62522]: DEBUG oslo_concurrency.lockutils [req-5d5a9afd-548e-485d-8eb8-93d29d73c550 req-0d833ebd-4e8f-45d7-8756-fad9053acd7e service nova] Releasing lock "refresh_cache-68b4c229-0ace-486f-9a99-d3c955b7bdfb" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 688.687787] env[62522]: DEBUG nova.compute.manager [req-5d5a9afd-548e-485d-8eb8-93d29d73c550 req-0d833ebd-4e8f-45d7-8756-fad9053acd7e service nova] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Received event network-vif-deleted-907f7f2d-f620-423c-bd77-a4685802e879 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 688.694930] env[62522]: DEBUG nova.virt.hardware [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 688.695964] env[62522]: DEBUG nova.virt.hardware [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 688.697208] env[62522]: DEBUG nova.virt.hardware [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 688.697208] env[62522]: DEBUG nova.virt.hardware [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 688.697208] env[62522]: DEBUG nova.virt.hardware [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 688.697208] env[62522]: DEBUG nova.virt.hardware [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 688.697583] env[62522]: DEBUG nova.virt.hardware [None 
req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 688.697902] env[62522]: DEBUG nova.virt.hardware [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 688.698259] env[62522]: DEBUG nova.virt.hardware [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 688.698614] env[62522]: DEBUG nova.virt.hardware [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 688.698947] env[62522]: DEBUG nova.virt.hardware [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 688.701018] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efbd5d13-dc37-402a-b7a1-74829576520f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.709326] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c1e41f8-c539-40ab-b832-dd3632b0e7bd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.754699] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415131, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.818266] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415132, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.559866} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.818513] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 68b4c229-0ace-486f-9a99-d3c955b7bdfb/68b4c229-0ace-486f-9a99-d3c955b7bdfb.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 688.818741] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 688.818992] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ac3fc618-ff21-4a36-81a0-7f60d0cb56c0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.828400] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 688.828400] env[62522]: value = "task-2415134" [ 688.828400] env[62522]: _type = "Task" [ 688.828400] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.837868] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415134, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.861941] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "refresh_cache-879354d3-7423-41e2-93f6-0d8d3a120170" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 688.862071] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquired lock "refresh_cache-879354d3-7423-41e2-93f6-0d8d3a120170" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.862288] env[62522]: DEBUG nova.network.neutron [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 688.920739] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.298s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 688.925826] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.336s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 688.928298] env[62522]: INFO nova.compute.claims [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 688.940482] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415133, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.768338} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.940482] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] c73686c6-4dd8-4f00-a65a-5d8574409ad1/c73686c6-4dd8-4f00-a65a-5d8574409ad1.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 688.940696] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 688.940932] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c06955b1-4dc3-4db9-8403-a8a9ab0dc6b0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.948425] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Waiting for the task: (returnval){ [ 688.948425] env[62522]: value = "task-2415135" [ 688.948425] env[62522]: _type = "Task" [ 688.948425] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.958945] env[62522]: INFO nova.scheduler.client.report [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Deleted allocations for instance a5657a70-5374-4d52-be9a-2d05f9556d16 [ 688.967200] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415135, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.119136] env[62522]: DEBUG nova.compute.manager [req-122f7da1-574e-489b-8d85-bbffc3690ebc req-5ea96a36-8aa0-4b69-ab4b-01db8f71fe9d service nova] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Received event network-changed-d2781fca-06c0-403d-8704-705de755c0a0 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 689.119447] env[62522]: DEBUG nova.compute.manager [req-122f7da1-574e-489b-8d85-bbffc3690ebc req-5ea96a36-8aa0-4b69-ab4b-01db8f71fe9d service nova] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Refreshing instance network info cache due to event network-changed-d2781fca-06c0-403d-8704-705de755c0a0. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 689.119738] env[62522]: DEBUG oslo_concurrency.lockutils [req-122f7da1-574e-489b-8d85-bbffc3690ebc req-5ea96a36-8aa0-4b69-ab4b-01db8f71fe9d service nova] Acquiring lock "refresh_cache-7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 689.119959] env[62522]: DEBUG oslo_concurrency.lockutils [req-122f7da1-574e-489b-8d85-bbffc3690ebc req-5ea96a36-8aa0-4b69-ab4b-01db8f71fe9d service nova] Acquired lock "refresh_cache-7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.120189] env[62522]: DEBUG nova.network.neutron [req-122f7da1-574e-489b-8d85-bbffc3690ebc req-5ea96a36-8aa0-4b69-ab4b-01db8f71fe9d service nova] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Refreshing network info cache for port d2781fca-06c0-403d-8704-705de755c0a0 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 689.141746] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 689.253906] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415131, 'name': PowerOnVM_Task, 'duration_secs': 1.061311} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.254239] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 689.254463] env[62522]: INFO nova.compute.manager [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Took 8.96 seconds to spawn the instance on the hypervisor. [ 689.254651] env[62522]: DEBUG nova.compute.manager [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 689.255533] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c7240e-9c8c-4fa3-b9e9-66daba560b3d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.340225] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415134, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074744} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.340533] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 689.342355] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-987a2d00-e855-4185-a1b0-4610f20d1da4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.364971] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Reconfiguring VM instance instance-00000013 to attach disk [datastore2] 68b4c229-0ace-486f-9a99-d3c955b7bdfb/68b4c229-0ace-486f-9a99-d3c955b7bdfb.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 689.367403] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43e50040-d144-46e3-86c6-8c6465481267 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.392418] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 689.392418] env[62522]: value = "task-2415136" [ 689.392418] env[62522]: _type = "Task" [ 689.392418] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.400596] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415136, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.459341] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415135, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065638} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.460454] env[62522]: DEBUG nova.network.neutron [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 689.462838] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 689.464325] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-160923de-6259-4b3f-8187-48adc1efd8fc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.471472] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9eea8790-4895-4950-9ad2-1b9a9bccaccf tempest-ServerDiagnosticsNegativeTest-1402839449 tempest-ServerDiagnosticsNegativeTest-1402839449-project-member] Lock "a5657a70-5374-4d52-be9a-2d05f9556d16" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.446s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.496125] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Reconfiguring VM instance instance-00000011 to attach disk [datastore2] c73686c6-4dd8-4f00-a65a-5d8574409ad1/c73686c6-4dd8-4f00-a65a-5d8574409ad1.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 689.498322] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-64ee2629-861d-444a-8813-a1bc42a1e8b1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.520274] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Waiting for the task: (returnval){ [ 689.520274] env[62522]: value = "task-2415137" [ 689.520274] env[62522]: _type = "Task" [ 689.520274] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.529784] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415137, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.648772] env[62522]: DEBUG nova.network.neutron [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Successfully updated port: bb09cad6-a323-4801-8cb8-7e58b646a38e {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 689.775084] env[62522]: INFO nova.compute.manager [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Took 39.59 seconds to build instance. 
[ 689.923610] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415136, 'name': ReconfigVM_Task, 'duration_secs': 0.493389} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.924297] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Reconfigured VM instance instance-00000013 to attach disk [datastore2] 68b4c229-0ace-486f-9a99-d3c955b7bdfb/68b4c229-0ace-486f-9a99-d3c955b7bdfb.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 689.925180] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-03bc7820-0849-4f1b-a9bd-710a8b300659 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.941944] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 689.941944] env[62522]: value = "task-2415138" [ 689.941944] env[62522]: _type = "Task" [ 689.941944] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.945980] env[62522]: DEBUG nova.network.neutron [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Updating instance_info_cache with network_info: [{"id": "cd619060-5655-434c-967f-7552adca021b", "address": "fa:16:3e:7d:62:dc", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd619060-56", "ovs_interfaceid": "cd619060-5655-434c-967f-7552adca021b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.975349] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415138, 'name': Rename_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.053747] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415137, 'name': ReconfigVM_Task, 'duration_secs': 0.327897} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.061873] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Reconfigured VM instance instance-00000011 to attach disk [datastore2] c73686c6-4dd8-4f00-a65a-5d8574409ad1/c73686c6-4dd8-4f00-a65a-5d8574409ad1.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 690.061873] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-059dd4e9-a846-4773-89d5-8ce7665c83d5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.072707] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Waiting for the task: (returnval){ [ 690.072707] env[62522]: value = "task-2415139" [ 690.072707] env[62522]: _type = "Task" [ 690.072707] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.079609] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415139, 'name': Rename_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.152804] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "refresh_cache-19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 690.153056] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "refresh_cache-19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.153123] env[62522]: DEBUG nova.network.neutron [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 690.277205] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "6d8b5429-113b-4280-9851-bf6614dde4a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.835s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 690.459338] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Releasing lock "refresh_cache-879354d3-7423-41e2-93f6-0d8d3a120170" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 690.459815] env[62522]: DEBUG nova.compute.manager [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Instance network_info: |[{"id": "cd619060-5655-434c-967f-7552adca021b", "address": "fa:16:3e:7d:62:dc", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd619060-56", "ovs_interfaceid": "cd619060-5655-434c-967f-7552adca021b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 690.460428] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:62:dc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f6d1427-d86b-4371-9172-50e4bb0eb1cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cd619060-5655-434c-967f-7552adca021b', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 690.474364] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Creating folder: Project (91dee2b9e8bd456cbb55667383b0058d). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 690.479095] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-90161aa8-6312-49c1-9c24-8216ba3e853a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.484368] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415138, 'name': Rename_Task, 'duration_secs': 0.22652} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.484981] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 690.485383] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc046edb-41aa-4696-8bd3-3864efdd1b00 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.490044] env[62522]: DEBUG nova.network.neutron [req-122f7da1-574e-489b-8d85-bbffc3690ebc req-5ea96a36-8aa0-4b69-ab4b-01db8f71fe9d service nova] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Updated VIF entry in instance network info cache for port d2781fca-06c0-403d-8704-705de755c0a0. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 690.490384] env[62522]: DEBUG nova.network.neutron [req-122f7da1-574e-489b-8d85-bbffc3690ebc req-5ea96a36-8aa0-4b69-ab4b-01db8f71fe9d service nova] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Updating instance_info_cache with network_info: [{"id": "d2781fca-06c0-403d-8704-705de755c0a0", "address": "fa:16:3e:74:71:d5", "network": {"id": "2d13676e-cb66-4166-b85f-b3c122b27f67", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1154652116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dae444f2b5845aa9264fea1f237f0e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2781fca-06", "ovs_interfaceid": "d2781fca-06c0-403d-8704-705de755c0a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.492658] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 690.492658] env[62522]: value = "task-2415141" [ 690.492658] env[62522]: _type = "Task" [ 690.492658] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.493957] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Created folder: Project (91dee2b9e8bd456cbb55667383b0058d) in parent group-v489562. [ 690.494149] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Creating folder: Instances. Parent ref: group-v489620. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 690.496979] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-17755c84-de33-47e4-9025-d387720fb456 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.505353] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415141, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.507038] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Created folder: Instances in parent group-v489620. [ 690.507315] env[62522]: DEBUG oslo.service.loopingcall [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 690.507492] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 690.508478] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0d7e8289-7988-43de-8bee-b35341ad578c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.529577] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 690.529577] env[62522]: value = "task-2415143" [ 690.529577] env[62522]: _type = "Task" [ 690.529577] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.537577] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415143, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.582009] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415139, 'name': Rename_Task, 'duration_secs': 0.133254} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.582299] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 690.582552] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-421060f6-f81d-4200-ab39-57ee0d7493ea {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.588268] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Waiting for the task: (returnval){ [ 690.588268] env[62522]: value = "task-2415144" [ 690.588268] env[62522]: _type = "Task" [ 690.588268] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.594140] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9afdc183-c4a6-46d2-a0da-679dd03b75fd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.602464] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415144, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.606778] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57f86d49-8dc6-4d9c-ac6e-8ebe63f594d7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.645046] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e49f6a35-7c81-4082-921c-9204f71e2706 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.653217] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24d8ee4e-cb9b-43f7-bf81-eb0a1be33fd5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.669712] env[62522]: DEBUG nova.compute.provider_tree [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 690.729756] env[62522]: DEBUG nova.network.neutron [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 690.781093] env[62522]: DEBUG nova.compute.manager [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 690.848510] env[62522]: DEBUG nova.compute.manager [req-b1d6d797-d977-47de-8bc7-b30916ef6c86 req-2542404a-ceae-4220-81bc-0823c111b4c9 service nova] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Received event network-changed-cd619060-5655-434c-967f-7552adca021b {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 690.848510] env[62522]: DEBUG nova.compute.manager [req-b1d6d797-d977-47de-8bc7-b30916ef6c86 req-2542404a-ceae-4220-81bc-0823c111b4c9 service nova] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Refreshing instance network info cache due to event network-changed-cd619060-5655-434c-967f-7552adca021b. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 690.848510] env[62522]: DEBUG oslo_concurrency.lockutils [req-b1d6d797-d977-47de-8bc7-b30916ef6c86 req-2542404a-ceae-4220-81bc-0823c111b4c9 service nova] Acquiring lock "refresh_cache-879354d3-7423-41e2-93f6-0d8d3a120170" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 690.848510] env[62522]: DEBUG oslo_concurrency.lockutils [req-b1d6d797-d977-47de-8bc7-b30916ef6c86 req-2542404a-ceae-4220-81bc-0823c111b4c9 service nova] Acquired lock "refresh_cache-879354d3-7423-41e2-93f6-0d8d3a120170" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.848510] env[62522]: DEBUG nova.network.neutron [req-b1d6d797-d977-47de-8bc7-b30916ef6c86 req-2542404a-ceae-4220-81bc-0823c111b4c9 service nova] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Refreshing network info cache for port cd619060-5655-434c-967f-7552adca021b {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 690.994852] env[62522]: DEBUG oslo_concurrency.lockutils [req-122f7da1-574e-489b-8d85-bbffc3690ebc req-5ea96a36-8aa0-4b69-ab4b-01db8f71fe9d service nova] Releasing lock "refresh_cache-7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 691.004620] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415141, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.040459] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415143, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.052169] env[62522]: DEBUG nova.network.neutron [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Updating instance_info_cache with network_info: [{"id": "bb09cad6-a323-4801-8cb8-7e58b646a38e", "address": "fa:16:3e:c2:3c:c7", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb09cad6-a3", "ovs_interfaceid": "bb09cad6-a323-4801-8cb8-7e58b646a38e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.099820] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415144, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.178027] env[62522]: DEBUG nova.scheduler.client.report [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 691.310816] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.457735] env[62522]: DEBUG nova.compute.manager [req-13ee05b0-c32f-4b09-a5fd-c75a08736bb1 req-7bb50938-de00-4244-8b17-63b688c286e0 service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Received event network-vif-plugged-bb09cad6-a323-4801-8cb8-7e58b646a38e {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 691.460491] env[62522]: DEBUG oslo_concurrency.lockutils [req-13ee05b0-c32f-4b09-a5fd-c75a08736bb1 req-7bb50938-de00-4244-8b17-63b688c286e0 service nova] Acquiring lock "19d3d54c-5ba1-420f-b012-a08add8546c9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.460491] env[62522]: DEBUG oslo_concurrency.lockutils [req-13ee05b0-c32f-4b09-a5fd-c75a08736bb1 req-7bb50938-de00-4244-8b17-63b688c286e0 service nova] Lock "19d3d54c-5ba1-420f-b012-a08add8546c9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.460491] env[62522]: DEBUG oslo_concurrency.lockutils [req-13ee05b0-c32f-4b09-a5fd-c75a08736bb1 req-7bb50938-de00-4244-8b17-63b688c286e0 service nova] Lock "19d3d54c-5ba1-420f-b012-a08add8546c9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 691.460491] env[62522]: DEBUG nova.compute.manager [req-13ee05b0-c32f-4b09-a5fd-c75a08736bb1 req-7bb50938-de00-4244-8b17-63b688c286e0 service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] No waiting events found dispatching network-vif-plugged-bb09cad6-a323-4801-8cb8-7e58b646a38e {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 691.460491] env[62522]: WARNING nova.compute.manager [req-13ee05b0-c32f-4b09-a5fd-c75a08736bb1 req-7bb50938-de00-4244-8b17-63b688c286e0 service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Received unexpected event network-vif-plugged-bb09cad6-a323-4801-8cb8-7e58b646a38e for instance with vm_state building 
and task_state spawning. [ 691.460911] env[62522]: DEBUG nova.compute.manager [req-13ee05b0-c32f-4b09-a5fd-c75a08736bb1 req-7bb50938-de00-4244-8b17-63b688c286e0 service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Received event network-changed-bb09cad6-a323-4801-8cb8-7e58b646a38e {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 691.460911] env[62522]: DEBUG nova.compute.manager [req-13ee05b0-c32f-4b09-a5fd-c75a08736bb1 req-7bb50938-de00-4244-8b17-63b688c286e0 service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Refreshing instance network info cache due to event network-changed-bb09cad6-a323-4801-8cb8-7e58b646a38e. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 691.460911] env[62522]: DEBUG oslo_concurrency.lockutils [req-13ee05b0-c32f-4b09-a5fd-c75a08736bb1 req-7bb50938-de00-4244-8b17-63b688c286e0 service nova] Acquiring lock "refresh_cache-19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 691.504753] env[62522]: DEBUG oslo_vmware.api [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415141, 'name': PowerOnVM_Task, 'duration_secs': 0.980436} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.505045] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 691.505260] env[62522]: INFO nova.compute.manager [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Took 8.60 seconds to spawn the instance on the hypervisor. [ 691.505441] env[62522]: DEBUG nova.compute.manager [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 691.506364] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f147eb6f-eb3e-40c7-8ce7-5aaa35c06685 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.548303] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415143, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.555151] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "refresh_cache-19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 691.555495] env[62522]: DEBUG nova.compute.manager [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Instance network_info: |[{"id": "bb09cad6-a323-4801-8cb8-7e58b646a38e", "address": "fa:16:3e:c2:3c:c7", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb09cad6-a3", "ovs_interfaceid": "bb09cad6-a323-4801-8cb8-7e58b646a38e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 691.555874] env[62522]: DEBUG oslo_concurrency.lockutils [req-13ee05b0-c32f-4b09-a5fd-c75a08736bb1 req-7bb50938-de00-4244-8b17-63b688c286e0 service nova] Acquired lock "refresh_cache-19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.556635] env[62522]: DEBUG nova.network.neutron [req-13ee05b0-c32f-4b09-a5fd-c75a08736bb1 req-7bb50938-de00-4244-8b17-63b688c286e0 service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Refreshing network info cache for port bb09cad6-a323-4801-8cb8-7e58b646a38e {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 691.558037] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:3c:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee555dfd-3d1a-4220-89cd-ffba64e4acf0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bb09cad6-a323-4801-8cb8-7e58b646a38e', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 691.566733] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 
tempest-AttachInterfacesTestJSON-1457883008-project-member] Creating folder: Project (4bdd1f5caf09454d808bcdc15df2d3a7). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 691.568028] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a4dbcd5e-48b0-4889-9b61-9619a096acf4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.581714] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Created folder: Project (4bdd1f5caf09454d808bcdc15df2d3a7) in parent group-v489562. [ 691.581714] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Creating folder: Instances. Parent ref: group-v489623. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 691.581714] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ebea2e6e-292b-45eb-9cdd-c61591d906ee {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.596566] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Created folder: Instances in parent group-v489623. [ 691.597216] env[62522]: DEBUG oslo.service.loopingcall [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 691.597826] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 691.598084] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-90f81416-24c9-49e0-b8f2-26f9af46acfa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.617087] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415144, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.625320] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 691.625320] env[62522]: value = "task-2415147" [ 691.625320] env[62522]: _type = "Task" [ 691.625320] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.632257] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415147, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.684380] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.757s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 691.684380] env[62522]: DEBUG nova.compute.manager [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 691.686079] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.030s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.688266] env[62522]: INFO nova.compute.claims [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 691.883046] env[62522]: DEBUG nova.network.neutron [req-b1d6d797-d977-47de-8bc7-b30916ef6c86 req-2542404a-ceae-4220-81bc-0823c111b4c9 service nova] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Updated VIF entry in instance network info cache for port cd619060-5655-434c-967f-7552adca021b. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 691.883046] env[62522]: DEBUG nova.network.neutron [req-b1d6d797-d977-47de-8bc7-b30916ef6c86 req-2542404a-ceae-4220-81bc-0823c111b4c9 service nova] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Updating instance_info_cache with network_info: [{"id": "cd619060-5655-434c-967f-7552adca021b", "address": "fa:16:3e:7d:62:dc", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd619060-56", "ovs_interfaceid": "cd619060-5655-434c-967f-7552adca021b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.022164] env[62522]: INFO nova.compute.manager [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Took 37.42 seconds to build instance. [ 692.042710] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415143, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.105859] env[62522]: DEBUG oslo_vmware.api [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415144, 'name': PowerOnVM_Task, 'duration_secs': 1.047086} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.108690] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 692.109968] env[62522]: DEBUG nova.compute.manager [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 692.111472] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b17eff8d-db1d-451d-bd0c-8c072f83981b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.132835] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415147, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.194991] env[62522]: DEBUG nova.compute.utils [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 692.199361] env[62522]: DEBUG nova.compute.manager [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 692.199582] env[62522]: DEBUG nova.network.neutron [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 692.318900] env[62522]: DEBUG nova.policy [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a10b77f3502a4e51a5e599b823f08db2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '962664c996f24cf9ae192f79fae18ca4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 692.388339] env[62522]: DEBUG oslo_concurrency.lockutils [req-b1d6d797-d977-47de-8bc7-b30916ef6c86 req-2542404a-ceae-4220-81bc-0823c111b4c9 service nova] Releasing lock "refresh_cache-879354d3-7423-41e2-93f6-0d8d3a120170" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 692.524707] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ebd41b7d-fb39-44a7-86a6-42fc4ae6dedd tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "68b4c229-0ace-486f-9a99-d3c955b7bdfb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.035s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 692.542456] env[62522]: DEBUG nova.network.neutron [req-13ee05b0-c32f-4b09-a5fd-c75a08736bb1 req-7bb50938-de00-4244-8b17-63b688c286e0 service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Updated VIF entry in instance network info cache for port bb09cad6-a323-4801-8cb8-7e58b646a38e. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 692.542456] env[62522]: DEBUG nova.network.neutron [req-13ee05b0-c32f-4b09-a5fd-c75a08736bb1 req-7bb50938-de00-4244-8b17-63b688c286e0 service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Updating instance_info_cache with network_info: [{"id": "bb09cad6-a323-4801-8cb8-7e58b646a38e", "address": "fa:16:3e:c2:3c:c7", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb09cad6-a3", "ovs_interfaceid": "bb09cad6-a323-4801-8cb8-7e58b646a38e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.548734] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415143, 'name': CreateVM_Task, 'duration_secs': 1.686742} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.549719] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 692.550444] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.550653] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.551016] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 692.551562] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2bb215ca-b468-47b4-804e-c03e456f90f8 {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.556777] env[62522]: DEBUG oslo_vmware.api [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 692.556777] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bd0088-2d97-4ba9-fbb0-9db923be4c35" [ 692.556777] env[62522]: _type = "Task" [ 692.556777] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.566261] env[62522]: DEBUG oslo_vmware.api [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bd0088-2d97-4ba9-fbb0-9db923be4c35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.630937] env[62522]: DEBUG oslo_concurrency.lockutils [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.637147] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415147, 'name': CreateVM_Task, 'duration_secs': 0.713173} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.637207] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 692.637844] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.639657] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.639657] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 692.639657] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aaee7bd9-687d-4f4a-aa06-9652993995b6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.643536] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b 
tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 692.643536] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52360c3a-7ec1-5795-458e-d6d887b0e3b2" [ 692.643536] env[62522]: _type = "Task" [ 692.643536] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.651524] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52360c3a-7ec1-5795-458e-d6d887b0e3b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.700173] env[62522]: DEBUG nova.compute.manager [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 692.875256] env[62522]: DEBUG nova.network.neutron [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Successfully created port: 2e33c70f-036d-459c-a393-f570cbf7089c {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 693.027732] env[62522]: DEBUG nova.compute.manager [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 693.050689] env[62522]: DEBUG oslo_concurrency.lockutils [req-13ee05b0-c32f-4b09-a5fd-c75a08736bb1 req-7bb50938-de00-4244-8b17-63b688c286e0 service nova] Releasing lock "refresh_cache-19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.075156] env[62522]: DEBUG oslo_vmware.api [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bd0088-2d97-4ba9-fbb0-9db923be4c35, 'name': SearchDatastore_Task, 'duration_secs': 0.046332} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.075851] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.075851] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 693.076009] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.076441] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.076551] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 693.076945] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3e19b16-2b81-4ba2-8ec2-b4f7eb55967e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.089558] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 693.089558] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 693.089774] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c50f1de9-6f59-4d26-b1bf-7bc0f07a47c9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.095662] env[62522]: DEBUG oslo_vmware.api [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 693.095662] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e448bd-8e77-7f04-f565-eef032d93eb5" [ 693.095662] env[62522]: _type = "Task" [ 693.095662] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.107312] env[62522]: DEBUG oslo_vmware.api [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e448bd-8e77-7f04-f565-eef032d93eb5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.161399] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52360c3a-7ec1-5795-458e-d6d887b0e3b2, 'name': SearchDatastore_Task, 'duration_secs': 0.027859} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.161399] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.161631] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 693.161891] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.162048] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.162232] env[62522]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 693.162522] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-26f92e28-7ada-4313-9080-b10c0f8dea77 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.187982] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 693.188240] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 693.189475] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-747317b3-39d0-4256-bea3-5a6681fc6fcc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.205617] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 693.205617] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5256c082-8f86-ae8a-a624-68f85cad565c" [ 693.205617] env[62522]: _type = "Task" [ 693.205617] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.232357] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5256c082-8f86-ae8a-a624-68f85cad565c, 'name': SearchDatastore_Task, 'duration_secs': 0.009064} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.234325] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31aedc73-9f7c-4ef9-809a-c4a1ab1c348d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.242895] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 693.242895] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ab8346-4d21-2fc1-310c-45f6c2c9a2b1" [ 693.242895] env[62522]: _type = "Task" [ 693.242895] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.253230] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ab8346-4d21-2fc1-310c-45f6c2c9a2b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.357173] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "6d8b5429-113b-4280-9851-bf6614dde4a7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 693.357459] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "6d8b5429-113b-4280-9851-bf6614dde4a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.357677] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "6d8b5429-113b-4280-9851-bf6614dde4a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 693.357881] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "6d8b5429-113b-4280-9851-bf6614dde4a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.358070] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "6d8b5429-113b-4280-9851-bf6614dde4a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.364306] env[62522]: INFO nova.compute.manager [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Terminating instance [ 693.401733] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c580a33-722f-4310-a6c6-28b55e0513f0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.410454] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-179fc1c8-968f-4a16-9682-166a66114c80 
{{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.448018] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-544b595b-20be-4bc3-8de2-719921c11ad6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.455041] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Acquiring lock "566c207c-5506-4410-98ab-aee9fdbc5d6e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 693.455041] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Lock "566c207c-5506-4410-98ab-aee9fdbc5d6e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.460593] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b26aed-6daf-41a9-8f5b-e714a3ba7ca8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.474861] env[62522]: DEBUG nova.compute.provider_tree [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 693.559461] env[62522]: DEBUG oslo_concurrency.lockutils [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 693.608201] env[62522]: DEBUG oslo_vmware.api [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e448bd-8e77-7f04-f565-eef032d93eb5, 'name': SearchDatastore_Task, 'duration_secs': 0.010224} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.608201] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61a75698-649c-44c9-b2ed-f1887a935feb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.613638] env[62522]: DEBUG oslo_vmware.api [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 693.613638] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a34640-dac4-4943-644c-09b9d928f415" [ 693.613638] env[62522]: _type = "Task" [ 693.613638] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.622222] env[62522]: DEBUG oslo_vmware.api [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a34640-dac4-4943-644c-09b9d928f415, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.735048] env[62522]: DEBUG nova.compute.manager [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 693.753197] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ab8346-4d21-2fc1-310c-45f6c2c9a2b1, 'name': SearchDatastore_Task, 'duration_secs': 0.010707} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.753197] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.753197] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 19d3d54c-5ba1-420f-b012-a08add8546c9/19d3d54c-5ba1-420f-b012-a08add8546c9.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 693.753991] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e8605b79-93f6-4586-9972-1adc8ae79451 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.771829] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 693.771829] env[62522]: value = "task-2415148" [ 693.771829] env[62522]: _type = "Task" [ 693.771829] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.774306] env[62522]: DEBUG nova.virt.hardware [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 693.774548] env[62522]: DEBUG nova.virt.hardware [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 693.775924] env[62522]: DEBUG nova.virt.hardware [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 
693.775924] env[62522]: DEBUG nova.virt.hardware [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 693.775924] env[62522]: DEBUG nova.virt.hardware [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 693.775924] env[62522]: DEBUG nova.virt.hardware [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 693.775924] env[62522]: DEBUG nova.virt.hardware [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 693.776170] env[62522]: DEBUG nova.virt.hardware [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 693.776170] env[62522]: DEBUG nova.virt.hardware [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 693.776170] env[62522]: DEBUG nova.virt.hardware [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 693.776283] env[62522]: DEBUG nova.virt.hardware [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 693.777689] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2903a8b-c2cd-4d00-aeee-fa931475920f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.792717] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fdd5583-d4f8-4621-9319-ea7d7e608b1a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.797022] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 
tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415148, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.875584] env[62522]: DEBUG nova.compute.manager [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 693.877391] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 693.877391] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c293aaa5-e60b-4fe8-8cec-a26a18dcb4d8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.884907] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 693.885217] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b6c03283-6846-4226-9fae-11887e8e3e74 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.891365] env[62522]: DEBUG oslo_vmware.api [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 693.891365] env[62522]: value = "task-2415149" [ 693.891365] env[62522]: _type = "Task" [ 693.891365] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.899391] env[62522]: DEBUG oslo_vmware.api [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415149, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.982144] env[62522]: DEBUG nova.scheduler.client.report [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 693.985096] env[62522]: DEBUG oslo_concurrency.lockutils [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "68b4c229-0ace-486f-9a99-d3c955b7bdfb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 693.985448] env[62522]: DEBUG oslo_concurrency.lockutils [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "68b4c229-0ace-486f-9a99-d3c955b7bdfb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.986382] env[62522]: DEBUG oslo_concurrency.lockutils [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "68b4c229-0ace-486f-9a99-d3c955b7bdfb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 693.986677] env[62522]: DEBUG oslo_concurrency.lockutils [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "68b4c229-0ace-486f-9a99-d3c955b7bdfb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.986866] env[62522]: DEBUG oslo_concurrency.lockutils [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "68b4c229-0ace-486f-9a99-d3c955b7bdfb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.992018] env[62522]: INFO nova.compute.manager [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Terminating instance [ 694.127404] env[62522]: DEBUG oslo_vmware.api [None req-9526fe02-6099-40d0-8d58-a67885f475aa 
tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a34640-dac4-4943-644c-09b9d928f415, 'name': SearchDatastore_Task, 'duration_secs': 0.009964} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.127823] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 694.128311] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 879354d3-7423-41e2-93f6-0d8d3a120170/879354d3-7423-41e2-93f6-0d8d3a120170.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 694.128688] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9cd54665-0e47-4a30-94c8-66f5440841d2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.135944] env[62522]: DEBUG oslo_vmware.api [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 694.135944] env[62522]: value = "task-2415150" [ 694.135944] env[62522]: _type = "Task" [ 694.135944] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.147048] env[62522]: DEBUG oslo_vmware.api [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415150, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.284577] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Acquiring lock "c73686c6-4dd8-4f00-a65a-5d8574409ad1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.285280] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Lock "c73686c6-4dd8-4f00-a65a-5d8574409ad1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 694.285280] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Acquiring lock "c73686c6-4dd8-4f00-a65a-5d8574409ad1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.285280] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Lock "c73686c6-4dd8-4f00-a65a-5d8574409ad1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 694.285572] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Lock "c73686c6-4dd8-4f00-a65a-5d8574409ad1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 694.289703] env[62522]: INFO nova.compute.manager [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Terminating instance [ 694.295393] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415148, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517394} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.297536] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 19d3d54c-5ba1-420f-b012-a08add8546c9/19d3d54c-5ba1-420f-b012-a08add8546c9.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 694.297800] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 694.299105] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7c1ac002-8a80-4be9-a540-ba25c0789ff0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.306084] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 694.306084] env[62522]: value = "task-2415151" [ 694.306084] env[62522]: _type = "Task" [ 694.306084] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.319197] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415151, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.401587] env[62522]: DEBUG oslo_vmware.api [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415149, 'name': PowerOffVM_Task, 'duration_secs': 0.211729} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.401941] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 694.402582] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 694.402582] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-543e5c0d-0cae-4ddd-a564-13adf654894a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.490638] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.804s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 694.491400] env[62522]: DEBUG nova.compute.manager [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 694.495789] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 29.587s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 694.496049] env[62522]: DEBUG nova.objects.instance [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62522) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 694.500912] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 694.500912] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 694.500912] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Deleting the datastore file [datastore2] 6d8b5429-113b-4280-9851-bf6614dde4a7 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 694.500912] env[62522]: DEBUG nova.compute.manager [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 694.500912] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 694.501461] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c846f1c-6535-4b1c-b233-17f32bfed469 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.503152] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acf62ccf-a41f-4e2c-aa4d-1ff300160dbc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.512014] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 694.513547] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b87812ce-b428-4343-b408-da1071e97186 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.516140] env[62522]: DEBUG oslo_vmware.api [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 694.516140] env[62522]: value = "task-2415153" [ 694.516140] env[62522]: _type = "Task" [ 694.516140] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.522350] env[62522]: DEBUG oslo_vmware.api [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 694.522350] env[62522]: value = "task-2415154" [ 694.522350] env[62522]: _type = "Task" [ 694.522350] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.529835] env[62522]: DEBUG oslo_vmware.api [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415153, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.535149] env[62522]: DEBUG oslo_vmware.api [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415154, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.647287] env[62522]: DEBUG oslo_vmware.api [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415150, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.801044] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Acquiring lock "refresh_cache-c73686c6-4dd8-4f00-a65a-5d8574409ad1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 694.801289] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Acquired lock "refresh_cache-c73686c6-4dd8-4f00-a65a-5d8574409ad1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.803091] env[62522]: DEBUG nova.network.neutron [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 694.821213] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415151, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082312} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.821213] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 694.821526] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-987d7ab8-92d7-4bc4-9721-f9043d74e5f0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.845378] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] 19d3d54c-5ba1-420f-b012-a08add8546c9/19d3d54c-5ba1-420f-b012-a08add8546c9.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 694.845983] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77d9a664-ab8d-4999-a7be-c98b68194d59 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.866077] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 694.866077] env[62522]: value = "task-2415155" [ 694.866077] env[62522]: _type = "Task" [ 694.866077] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.875294] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415155, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.001657] env[62522]: DEBUG nova.compute.utils [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 695.007261] env[62522]: DEBUG nova.compute.manager [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 695.007451] env[62522]: DEBUG nova.network.neutron [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 695.034040] env[62522]: DEBUG oslo_vmware.api [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415153, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.295754} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.037449] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 695.037449] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 695.037682] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 695.037682] env[62522]: INFO nova.compute.manager [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Took 1.16 seconds to destroy the instance on the hypervisor. 
[ 695.038390] env[62522]: DEBUG oslo.service.loopingcall [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 695.038633] env[62522]: DEBUG oslo_vmware.api [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415154, 'name': PowerOffVM_Task, 'duration_secs': 0.412508} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.039414] env[62522]: DEBUG nova.compute.manager [-] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 695.039414] env[62522]: DEBUG nova.network.neutron [-] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 695.040801] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 695.040984] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 695.041843] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-17ff1397-450d-4ce6-b580-0abf6259cde8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.098945] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 695.099185] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 695.099404] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Deleting the datastore file [datastore2] 68b4c229-0ace-486f-9a99-d3c955b7bdfb {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 695.099663] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-22797da0-43d3-4d01-a9c7-6249d2c11810 {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.107751] env[62522]: DEBUG oslo_vmware.api [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 695.107751] env[62522]: value = "task-2415157" [ 695.107751] env[62522]: _type = "Task" [ 695.107751] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.115576] env[62522]: DEBUG oslo_vmware.api [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415157, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.120123] env[62522]: DEBUG nova.network.neutron [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Successfully updated port: 2e33c70f-036d-459c-a393-f570cbf7089c {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 695.127479] env[62522]: DEBUG nova.policy [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5de7455684c8411bab8275a9c6d6d50c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '13784066a83f4c8a83f6d65e62a1e6df', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 695.149029] env[62522]: DEBUG oslo_vmware.api [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415150, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552812} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.149302] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 879354d3-7423-41e2-93f6-0d8d3a120170/879354d3-7423-41e2-93f6-0d8d3a120170.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 695.149519] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 695.149767] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ac89bb8e-2f57-4b58-81bb-af59efd2b5e5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.155547] env[62522]: DEBUG oslo_vmware.api [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 695.155547] env[62522]: value = "task-2415158" [ 695.155547] env[62522]: _type = "Task" [ 695.155547] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.164295] env[62522]: DEBUG oslo_vmware.api [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415158, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.318797] env[62522]: DEBUG nova.network.neutron [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 695.371546] env[62522]: DEBUG nova.network.neutron [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.384538] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415155, 'name': ReconfigVM_Task, 'duration_secs': 0.356866} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.384538] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Reconfigured VM instance instance-00000015 to attach disk [datastore1] 19d3d54c-5ba1-420f-b012-a08add8546c9/19d3d54c-5ba1-420f-b012-a08add8546c9.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 695.385081] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3d07eda3-9d3b-4092-a90d-a576e80645b6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.393018] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 695.393018] env[62522]: value = "task-2415159" [ 695.393018] env[62522]: _type = "Task" [ 695.393018] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.403209] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415159, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.511020] env[62522]: DEBUG nova.compute.manager [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 695.516327] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f381c665-7aac-45f1-998e-6d6bb99177a4 tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.021s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.523111] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.464s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.524306] env[62522]: INFO nova.compute.claims [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 695.597106] env[62522]: DEBUG nova.compute.manager [req-543e7d6b-828a-41f8-be37-271d019d1fba req-3738b2e0-06cb-40ea-a80b-84d474f2d3a9 service nova] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Received event network-vif-plugged-2e33c70f-036d-459c-a393-f570cbf7089c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 695.597485] env[62522]: DEBUG oslo_concurrency.lockutils [req-543e7d6b-828a-41f8-be37-271d019d1fba req-3738b2e0-06cb-40ea-a80b-84d474f2d3a9 service nova] Acquiring lock "c181ce48-9fe2-4400-9047-f8b5a7159dd3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.601688] env[62522]: DEBUG oslo_concurrency.lockutils [req-543e7d6b-828a-41f8-be37-271d019d1fba req-3738b2e0-06cb-40ea-a80b-84d474f2d3a9 service nova] Lock "c181ce48-9fe2-4400-9047-f8b5a7159dd3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.601688] env[62522]: DEBUG oslo_concurrency.lockutils [req-543e7d6b-828a-41f8-be37-271d019d1fba req-3738b2e0-06cb-40ea-a80b-84d474f2d3a9 service nova] Lock "c181ce48-9fe2-4400-9047-f8b5a7159dd3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.603215] env[62522]: DEBUG nova.compute.manager [req-543e7d6b-828a-41f8-be37-271d019d1fba req-3738b2e0-06cb-40ea-a80b-84d474f2d3a9 service nova] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] No waiting events found dispatching network-vif-plugged-2e33c70f-036d-459c-a393-f570cbf7089c {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 695.603215] env[62522]: WARNING nova.compute.manager [req-543e7d6b-828a-41f8-be37-271d019d1fba req-3738b2e0-06cb-40ea-a80b-84d474f2d3a9 service nova] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Received unexpected event network-vif-plugged-2e33c70f-036d-459c-a393-f570cbf7089c for instance with vm_state building and 
task_state spawning. [ 695.620111] env[62522]: DEBUG oslo_vmware.api [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415157, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164972} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.621240] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 695.622535] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 695.622535] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 695.622535] env[62522]: INFO nova.compute.manager [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Took 1.12 seconds to destroy the instance on the hypervisor. [ 695.622535] env[62522]: DEBUG oslo.service.loopingcall [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 695.622948] env[62522]: DEBUG nova.compute.manager [-] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 695.623000] env[62522]: DEBUG nova.network.neutron [-] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 695.629786] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "refresh_cache-c181ce48-9fe2-4400-9047-f8b5a7159dd3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 695.629917] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquired lock "refresh_cache-c181ce48-9fe2-4400-9047-f8b5a7159dd3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.630116] env[62522]: DEBUG nova.network.neutron [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 695.666206] env[62522]: DEBUG oslo_vmware.api [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415158, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.145143} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.666587] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 695.667591] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a4ea564-ce0c-4ecd-b4eb-fa4c17a1615b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.691954] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Reconfiguring VM instance instance-00000014 to attach disk [datastore2] 879354d3-7423-41e2-93f6-0d8d3a120170/879354d3-7423-41e2-93f6-0d8d3a120170.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 695.692878] env[62522]: DEBUG nova.network.neutron [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Successfully created port: c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 695.695115] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1ea3cf6-0a38-4f5c-9446-5e367af0fab5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.719661] env[62522]: DEBUG oslo_vmware.api [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 695.719661] env[62522]: value = "task-2415160" [ 695.719661] env[62522]: _type = "Task" [ 695.719661] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.727939] env[62522]: DEBUG oslo_vmware.api [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415160, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.803034] env[62522]: DEBUG nova.network.neutron [-] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.855761] env[62522]: DEBUG nova.compute.manager [req-75d444f9-f57e-428b-ba14-c0a95e63d0d1 req-b413086d-875b-4ea3-bd32-f36a3988050f service nova] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Received event network-vif-deleted-7d31f9c2-3052-4e8a-b932-8aa226e03b49 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 695.875024] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Releasing lock "refresh_cache-c73686c6-4dd8-4f00-a65a-5d8574409ad1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 695.875338] env[62522]: DEBUG nova.compute.manager [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 695.875544] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 695.876812] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d9faf75-234b-4e42-8278-969eff8326b9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.885518] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 695.885617] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-75dbf53a-57f5-405e-b667-12c95dd026c5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.892950] env[62522]: DEBUG oslo_vmware.api [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Waiting for the task: (returnval){ [ 695.892950] env[62522]: value = "task-2415161" [ 695.892950] env[62522]: _type = "Task" [ 695.892950] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.905449] env[62522]: DEBUG oslo_vmware.api [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415161, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.908649] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415159, 'name': Rename_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.167656] env[62522]: DEBUG nova.network.neutron [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 696.233506] env[62522]: DEBUG oslo_vmware.api [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415160, 'name': ReconfigVM_Task, 'duration_secs': 0.275356} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.233832] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Reconfigured VM instance instance-00000014 to attach disk [datastore2] 879354d3-7423-41e2-93f6-0d8d3a120170/879354d3-7423-41e2-93f6-0d8d3a120170.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 696.234448] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-32f105ea-422f-4414-a9ca-e6983d249af5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.243621] env[62522]: DEBUG oslo_vmware.api [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 696.243621] env[62522]: value = "task-2415162" [ 696.243621] env[62522]: _type = "Task" [ 696.243621] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.252440] env[62522]: DEBUG oslo_vmware.api [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415162, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.306274] env[62522]: INFO nova.compute.manager [-] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Took 1.27 seconds to deallocate network for instance. 
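The records above repeatedly invoke a vSphere task (Rename_Task, ReconfigVM_Task, PowerOffVM_Task) and then poll it through oslo_vmware's wait_for_task until it reports success. A minimal sketch of that invoke-and-poll pattern follows; the vCenter address, credentials and the vm-12345 MoRef are placeholders rather than values from this run, and the constructor arguments shown are the commonly used ones, not necessarily Nova's exact call.

```python
# Minimal sketch of the invoke-and-poll pattern visible in the records above.
# Host, credentials and the MoRef id are placeholders, not values from this log.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Constructing the session logs into vCenter immediately (create_session=True).
session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Build a managed object reference from a (hypothetical) MoRef id.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Kick off the vSphere task, then block while wait_for_task polls its state;
# it returns the task info on success and raises if the task ends in error.
task_ref = session.invoke_api(session.vim, 'Rename_Task', vm_ref,
                              newName='renamed-instance')
task_info = session.wait_for_task(task_ref)
print(task_info.state)  # 'success' once the task has finished
```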
[ 696.336631] env[62522]: DEBUG nova.network.neutron [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Updating instance_info_cache with network_info: [{"id": "2e33c70f-036d-459c-a393-f570cbf7089c", "address": "fa:16:3e:58:55:4e", "network": {"id": "5f1d73d1-ff9e-4081-87cf-8df6294f67c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-892212702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "962664c996f24cf9ae192f79fae18ca4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "419a5b3f-4c6f-4168-9def-746b4d8c5c24", "external-id": "nsx-vlan-transportzone-656", "segmentation_id": 656, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e33c70f-03", "ovs_interfaceid": "2e33c70f-036d-459c-a393-f570cbf7089c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.350819] env[62522]: DEBUG nova.network.neutron [-] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.407127] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415159, 'name': Rename_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.410162] env[62522]: DEBUG oslo_vmware.api [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415161, 'name': PowerOffVM_Task, 'duration_secs': 0.115707} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.410415] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 696.411472] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 696.411740] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-26770ebe-5b03-4442-a96a-b21e615a5f21 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.438381] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 696.438497] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 696.438658] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Deleting the datastore file [datastore2] c73686c6-4dd8-4f00-a65a-5d8574409ad1 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 696.439086] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-32992ca2-a645-42c2-b05f-61e53a861a8f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.445100] env[62522]: DEBUG oslo_vmware.api [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Waiting for the task: (returnval){ [ 696.445100] env[62522]: value = "task-2415164" [ 696.445100] env[62522]: _type = "Task" [ 696.445100] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.453803] env[62522]: DEBUG oslo_vmware.api [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415164, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.529953] env[62522]: DEBUG nova.compute.manager [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 696.557528] env[62522]: DEBUG nova.virt.hardware [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 696.557766] env[62522]: DEBUG nova.virt.hardware [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 696.557924] env[62522]: DEBUG nova.virt.hardware [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 696.558120] env[62522]: DEBUG nova.virt.hardware [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 696.558268] env[62522]: DEBUG nova.virt.hardware [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 696.558442] env[62522]: DEBUG nova.virt.hardware [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 696.558655] env[62522]: DEBUG nova.virt.hardware [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 
tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 696.558818] env[62522]: DEBUG nova.virt.hardware [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 696.558984] env[62522]: DEBUG nova.virt.hardware [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 696.559416] env[62522]: DEBUG nova.virt.hardware [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 696.559494] env[62522]: DEBUG nova.virt.hardware [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 696.560837] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c14ed11-0547-4436-b706-9bf1d4c19d6c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.573123] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdcbb590-3bb1-45b6-a744-bd27bde69ba4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.752901] env[62522]: DEBUG oslo_vmware.api [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415162, 'name': Rename_Task, 'duration_secs': 0.136736} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.754168] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 696.754168] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ccce7bff-dfcd-4931-9b9b-1431c4afd66b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.759618] env[62522]: DEBUG oslo_vmware.api [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 696.759618] env[62522]: value = "task-2415165" [ 696.759618] env[62522]: _type = "Task" [ 696.759618] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.767745] env[62522]: DEBUG oslo_vmware.api [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415165, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.814170] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 696.841740] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Releasing lock "refresh_cache-c181ce48-9fe2-4400-9047-f8b5a7159dd3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 696.841740] env[62522]: DEBUG nova.compute.manager [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Instance network_info: |[{"id": "2e33c70f-036d-459c-a393-f570cbf7089c", "address": "fa:16:3e:58:55:4e", "network": {"id": "5f1d73d1-ff9e-4081-87cf-8df6294f67c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-892212702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "962664c996f24cf9ae192f79fae18ca4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "419a5b3f-4c6f-4168-9def-746b4d8c5c24", "external-id": "nsx-vlan-transportzone-656", "segmentation_id": 656, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap2e33c70f-03", "ovs_interfaceid": "2e33c70f-036d-459c-a393-f570cbf7089c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 696.841867] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:55:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '419a5b3f-4c6f-4168-9def-746b4d8c5c24', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2e33c70f-036d-459c-a393-f570cbf7089c', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 696.847552] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Creating folder: Project (962664c996f24cf9ae192f79fae18ca4). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 696.847832] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d0aae4bc-02db-443b-8b8d-66452cdd398d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.854685] env[62522]: INFO nova.compute.manager [-] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Took 1.23 seconds to deallocate network for instance. [ 696.865212] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Created folder: Project (962664c996f24cf9ae192f79fae18ca4) in parent group-v489562. [ 696.865212] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Creating folder: Instances. Parent ref: group-v489626. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 696.871345] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1a6addf6-8f62-4656-a7de-0a04cc2eb2b3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.881149] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Created folder: Instances in parent group-v489626. [ 696.881402] env[62522]: DEBUG oslo.service.loopingcall [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 696.881596] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 696.881818] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c70454c-a5ce-46f1-a1cd-5a436ffb98ee {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.906705] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 696.906705] env[62522]: value = "task-2415168" [ 696.906705] env[62522]: _type = "Task" [ 696.906705] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.911452] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415159, 'name': Rename_Task, 'duration_secs': 1.141228} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.912520] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 696.913059] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0a992498-9715-4806-82e8-5a7c45c54bcb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.924119] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415168, 'name': CreateVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.925789] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 696.925789] env[62522]: value = "task-2415169" [ 696.925789] env[62522]: _type = "Task" [ 696.925789] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.936753] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415169, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.961319] env[62522]: DEBUG oslo_vmware.api [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Task: {'id': task-2415164, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104903} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.962421] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 696.962421] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 696.962421] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 696.962421] env[62522]: INFO nova.compute.manager [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Took 1.09 seconds to destroy the instance on the hypervisor. [ 696.962640] env[62522]: DEBUG oslo.service.loopingcall [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 696.962799] env[62522]: DEBUG nova.compute.manager [-] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 696.962873] env[62522]: DEBUG nova.network.neutron [-] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 696.989229] env[62522]: DEBUG nova.network.neutron [-] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 697.075038] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24411a67-f316-4455-bce0-b53d6710b9e9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.082563] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d5a2b73-f48c-4cbd-a28c-506a90caa44c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.112871] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71aae4c9-10c4-45e4-b3f5-6fd4b1a2449c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.120482] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2d728f-9b7f-4dd2-b0fb-a0981f199f12 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.134649] env[62522]: DEBUG nova.compute.provider_tree [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 697.269388] env[62522]: DEBUG oslo_vmware.api [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415165, 'name': PowerOnVM_Task, 'duration_secs': 0.481458} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.269853] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 697.270145] env[62522]: INFO nova.compute.manager [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Took 11.41 seconds to spawn the instance on the hypervisor. 
[ 697.270427] env[62522]: DEBUG nova.compute.manager [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 697.271318] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb2df2d9-6038-47d0-82c4-1b5289cd5d12 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.361946] env[62522]: DEBUG oslo_concurrency.lockutils [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 697.425170] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415168, 'name': CreateVM_Task, 'duration_secs': 0.369049} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.425543] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 697.426513] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.426832] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.427380] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 697.427865] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f983df8-d78f-45b9-be51-7e47194f76d2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.442491] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415169, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.442999] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 697.442999] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d5643c-d637-4def-a1ba-56fd6f0e87b2" [ 697.442999] env[62522]: _type = "Task" [ 697.442999] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.454452] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d5643c-d637-4def-a1ba-56fd6f0e87b2, 'name': SearchDatastore_Task, 'duration_secs': 0.012618} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.455018] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 697.455417] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 697.456028] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.456028] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.456500] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 697.456857] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce57eb4e-9ea7-405d-8e1f-a23b0e51c0b3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.465035] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 697.465035] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 697.465419] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28f67f52-1513-4648-bfc8-fd6eb5b5d267 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.470268] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 697.470268] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b0456a-1ef7-8347-2008-f2899e3ce7a5" [ 697.470268] env[62522]: _type = "Task" [ 697.470268] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.478804] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b0456a-1ef7-8347-2008-f2899e3ce7a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.491290] env[62522]: DEBUG nova.network.neutron [-] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.637804] env[62522]: DEBUG nova.scheduler.client.report [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 697.787686] env[62522]: INFO nova.compute.manager [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Took 40.32 seconds to build instance. [ 697.945074] env[62522]: DEBUG oslo_vmware.api [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415169, 'name': PowerOnVM_Task, 'duration_secs': 0.536348} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.945074] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 697.945074] env[62522]: INFO nova.compute.manager [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Took 9.28 seconds to spawn the instance on the hypervisor. [ 697.945074] env[62522]: DEBUG nova.compute.manager [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 697.945074] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01860e2b-5053-424c-902d-a8c677117377 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.966187] env[62522]: DEBUG nova.network.neutron [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Successfully updated port: c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 697.980584] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b0456a-1ef7-8347-2008-f2899e3ce7a5, 'name': SearchDatastore_Task, 'duration_secs': 0.010886} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.981927] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-081ead06-16e1-45d2-9743-cc18b1b7ed1c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.988785] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 697.988785] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]523ad6aa-6b17-7f71-5827-9447e5c2be56" [ 697.988785] env[62522]: _type = "Task" [ 697.988785] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.993255] env[62522]: INFO nova.compute.manager [-] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Took 1.03 seconds to deallocate network for instance. 
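The inventory reported above for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 fixes how much capacity Placement can hand out: for each resource class it can allocate roughly (total - reserved) * allocation_ratio, with max_unit capping any single request. A small sketch of that arithmetic on the logged values; the formula is standard Placement behaviour, the output format is illustrative.

```python
# How the inventory logged above translates into allocatable capacity:
# (total - reserved) * allocation_ratio, with max_unit bounding one request.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 149},
}

for rc, inv in inventory.items():
    capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    print(f"{rc}: {capacity} allocatable, at most {inv['max_unit']} per instance")
# VCPU: 192 allocatable, at most 16 per instance
# MEMORY_MB: 196078 allocatable, at most 65530 per instance
# DISK_GB: 400 allocatable, at most 149 per instance
```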
[ 697.998716] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]523ad6aa-6b17-7f71-5827-9447e5c2be56, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.143370] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.621s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 698.143986] env[62522]: DEBUG nova.compute.manager [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 698.146876] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.807s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 698.147529] env[62522]: DEBUG nova.objects.instance [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Lazy-loading 'resources' on Instance uuid 84ad5317-344d-44c1-9318-fa1574321296 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 698.286282] env[62522]: DEBUG nova.compute.manager [req-711fe21e-a49e-4795-a679-8adbd041abe3 req-285d8305-dda3-41d8-9da8-8be9fca859cf service nova] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Received event network-changed-2e33c70f-036d-459c-a393-f570cbf7089c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 698.286525] env[62522]: DEBUG nova.compute.manager [req-711fe21e-a49e-4795-a679-8adbd041abe3 req-285d8305-dda3-41d8-9da8-8be9fca859cf service nova] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Refreshing instance network info cache due to event network-changed-2e33c70f-036d-459c-a393-f570cbf7089c. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 698.286750] env[62522]: DEBUG oslo_concurrency.lockutils [req-711fe21e-a49e-4795-a679-8adbd041abe3 req-285d8305-dda3-41d8-9da8-8be9fca859cf service nova] Acquiring lock "refresh_cache-c181ce48-9fe2-4400-9047-f8b5a7159dd3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 698.286896] env[62522]: DEBUG oslo_concurrency.lockutils [req-711fe21e-a49e-4795-a679-8adbd041abe3 req-285d8305-dda3-41d8-9da8-8be9fca859cf service nova] Acquired lock "refresh_cache-c181ce48-9fe2-4400-9047-f8b5a7159dd3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.287068] env[62522]: DEBUG nova.network.neutron [req-711fe21e-a49e-4795-a679-8adbd041abe3 req-285d8305-dda3-41d8-9da8-8be9fca859cf service nova] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Refreshing network info cache for port 2e33c70f-036d-459c-a393-f570cbf7089c {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 698.292213] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9526fe02-6099-40d0-8d58-a67885f475aa tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "879354d3-7423-41e2-93f6-0d8d3a120170" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.877s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 698.461105] env[62522]: INFO nova.compute.manager [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Took 36.57 seconds to build instance. [ 698.473632] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Acquiring lock "refresh_cache-253a2903-2601-4f0a-8882-e7510406f9d5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 698.473632] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Acquired lock "refresh_cache-253a2903-2601-4f0a-8882-e7510406f9d5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.473632] env[62522]: DEBUG nova.network.neutron [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 698.499970] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]523ad6aa-6b17-7f71-5827-9447e5c2be56, 'name': SearchDatastore_Task, 'duration_secs': 0.047085} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.501102] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 698.501368] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] c181ce48-9fe2-4400-9047-f8b5a7159dd3/c181ce48-9fe2-4400-9047-f8b5a7159dd3.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 698.502561] env[62522]: DEBUG nova.compute.manager [req-766fa517-e4b4-4e6c-9081-2c5c9407b7b7 req-69e885b0-614d-4cb0-a597-0c7ed0d67032 service nova] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Received event network-vif-deleted-80d82c33-bbd9-41b4-ba21-705502101cf8 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 698.502720] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f93b9029-49c4-4d47-b394-9d70343e1c03 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.505744] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 698.511100] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 698.511100] env[62522]: value = "task-2415170" [ 698.511100] env[62522]: _type = "Task" [ 698.511100] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.519869] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415170, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.650587] env[62522]: DEBUG nova.compute.utils [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 698.655097] env[62522]: DEBUG nova.compute.manager [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 698.655097] env[62522]: DEBUG nova.network.neutron [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 698.739683] env[62522]: DEBUG nova.policy [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c311a0f0ba854dc3b7f30d641c97229f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca08d150df0147b29b30fb57739c7a6e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 698.794373] env[62522]: DEBUG nova.compute.manager [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 698.965595] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eee380ed-4968-4e3d-8f62-c8c88be56e9b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "19d3d54c-5ba1-420f-b012-a08add8546c9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.299s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.024153] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415170, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.060116] env[62522]: DEBUG nova.network.neutron [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 699.155466] env[62522]: DEBUG nova.compute.manager [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 699.198253] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d1af64-b835-4cfa-ae1a-513127b6818a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.206545] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0d323f-d816-41b5-9d6b-03f423be44f3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.217528] env[62522]: DEBUG nova.network.neutron [req-711fe21e-a49e-4795-a679-8adbd041abe3 req-285d8305-dda3-41d8-9da8-8be9fca859cf service nova] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Updated VIF entry in instance network info cache for port 2e33c70f-036d-459c-a393-f570cbf7089c. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 699.218585] env[62522]: DEBUG nova.network.neutron [req-711fe21e-a49e-4795-a679-8adbd041abe3 req-285d8305-dda3-41d8-9da8-8be9fca859cf service nova] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Updating instance_info_cache with network_info: [{"id": "2e33c70f-036d-459c-a393-f570cbf7089c", "address": "fa:16:3e:58:55:4e", "network": {"id": "5f1d73d1-ff9e-4081-87cf-8df6294f67c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-892212702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "962664c996f24cf9ae192f79fae18ca4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "419a5b3f-4c6f-4168-9def-746b4d8c5c24", "external-id": "nsx-vlan-transportzone-656", "segmentation_id": 656, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e33c70f-03", "ovs_interfaceid": "2e33c70f-036d-459c-a393-f570cbf7089c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.252241] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d89c51-f8be-4922-bf9c-6dc93d21ccde {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.262366] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70405f63-d4e5-4335-b62c-82413a6c3679 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.278612] env[62522]: DEBUG nova.compute.provider_tree [None 
req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 699.318117] env[62522]: DEBUG oslo_concurrency.lockutils [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 699.468850] env[62522]: DEBUG nova.compute.manager [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 699.472242] env[62522]: DEBUG nova.network.neutron [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Updating instance_info_cache with network_info: [{"id": "c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9", "address": "fa:16:3e:b9:ef:f1", "network": {"id": "a2389999-ffd5-4ea6-a9b5-2ee7151a9810", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-398612397-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13784066a83f4c8a83f6d65e62a1e6df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc470ec3e-5a", "ovs_interfaceid": "c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.473784] env[62522]: DEBUG nova.network.neutron [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Successfully created port: 66de060c-1aa7-4119-b646-bd495f55add8 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 699.521797] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415170, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.869575} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.522202] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] c181ce48-9fe2-4400-9047-f8b5a7159dd3/c181ce48-9fe2-4400-9047-f8b5a7159dd3.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 699.523043] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 699.523364] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3a7d50cf-caa8-46ce-8712-26dc72044912 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.531296] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 699.531296] env[62522]: value = "task-2415171" [ 699.531296] env[62522]: _type = "Task" [ 699.531296] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.540446] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415171, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.721159] env[62522]: DEBUG oslo_concurrency.lockutils [req-711fe21e-a49e-4795-a679-8adbd041abe3 req-285d8305-dda3-41d8-9da8-8be9fca859cf service nova] Releasing lock "refresh_cache-c181ce48-9fe2-4400-9047-f8b5a7159dd3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 699.721337] env[62522]: DEBUG nova.compute.manager [req-711fe21e-a49e-4795-a679-8adbd041abe3 req-285d8305-dda3-41d8-9da8-8be9fca859cf service nova] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Received event network-vif-plugged-c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 699.721494] env[62522]: DEBUG oslo_concurrency.lockutils [req-711fe21e-a49e-4795-a679-8adbd041abe3 req-285d8305-dda3-41d8-9da8-8be9fca859cf service nova] Acquiring lock "253a2903-2601-4f0a-8882-e7510406f9d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 699.721697] env[62522]: DEBUG oslo_concurrency.lockutils [req-711fe21e-a49e-4795-a679-8adbd041abe3 req-285d8305-dda3-41d8-9da8-8be9fca859cf service nova] Lock "253a2903-2601-4f0a-8882-e7510406f9d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.721858] env[62522]: DEBUG oslo_concurrency.lockutils [req-711fe21e-a49e-4795-a679-8adbd041abe3 req-285d8305-dda3-41d8-9da8-8be9fca859cf service nova] Lock "253a2903-2601-4f0a-8882-e7510406f9d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.722040] env[62522]: DEBUG nova.compute.manager [req-711fe21e-a49e-4795-a679-8adbd041abe3 req-285d8305-dda3-41d8-9da8-8be9fca859cf service nova] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] No waiting events found dispatching network-vif-plugged-c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 699.722217] env[62522]: WARNING nova.compute.manager [req-711fe21e-a49e-4795-a679-8adbd041abe3 req-285d8305-dda3-41d8-9da8-8be9fca859cf service nova] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Received unexpected event network-vif-plugged-c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9 for instance with vm_state building and task_state spawning. 
[ 699.782572] env[62522]: DEBUG nova.scheduler.client.report [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 699.875085] env[62522]: DEBUG nova.compute.manager [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Stashing vm_state: active {{(pid=62522) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 699.979405] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Releasing lock "refresh_cache-253a2903-2601-4f0a-8882-e7510406f9d5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 699.979723] env[62522]: DEBUG nova.compute.manager [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Instance network_info: |[{"id": "c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9", "address": "fa:16:3e:b9:ef:f1", "network": {"id": "a2389999-ffd5-4ea6-a9b5-2ee7151a9810", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-398612397-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13784066a83f4c8a83f6d65e62a1e6df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc470ec3e-5a", "ovs_interfaceid": "c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 699.982569] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:ef:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee20e439-fed9-490e-97dd-f3c886977ae1', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 699.990307] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Creating folder: Project (13784066a83f4c8a83f6d65e62a1e6df). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 699.992106] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3fe2a68c-11b1-47e7-a092-b363d7e10cfe {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.006699] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Created folder: Project (13784066a83f4c8a83f6d65e62a1e6df) in parent group-v489562. [ 700.006914] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Creating folder: Instances. Parent ref: group-v489629. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 700.008602] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 700.008602] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-75329ef8-de49-46f8-b2e3-79caff3d5ca5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.019112] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Created folder: Instances in parent group-v489629. [ 700.019112] env[62522]: DEBUG oslo.service.loopingcall [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 700.019112] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 700.019112] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bf55dfb8-48fc-4bd4-9132-9f475e9853e9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.045653] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415171, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06406} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.046996] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 700.047291] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 700.047291] env[62522]: value = "task-2415174" [ 700.047291] env[62522]: _type = "Task" [ 700.047291] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.047976] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bac263a-cb02-481f-8ec7-c72043b65f4a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.076901] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Reconfiguring VM instance instance-00000016 to attach disk [datastore2] c181ce48-9fe2-4400-9047-f8b5a7159dd3/c181ce48-9fe2-4400-9047-f8b5a7159dd3.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 700.077235] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415174, 'name': CreateVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.077811] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35b682db-837c-443f-a8c9-e663bc9c0bbc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.099373] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 700.099373] env[62522]: value = "task-2415175" [ 700.099373] env[62522]: _type = "Task" [ 700.099373] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.108362] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415175, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.164648] env[62522]: DEBUG nova.compute.manager [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 700.194293] env[62522]: DEBUG nova.virt.hardware [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 700.194585] env[62522]: DEBUG nova.virt.hardware [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 700.194776] env[62522]: DEBUG nova.virt.hardware [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 700.194863] env[62522]: DEBUG nova.virt.hardware [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 700.195025] env[62522]: DEBUG nova.virt.hardware [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 700.195177] env[62522]: DEBUG nova.virt.hardware [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 700.195395] env[62522]: DEBUG nova.virt.hardware [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 700.195553] env[62522]: DEBUG nova.virt.hardware [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 700.195720] env[62522]: DEBUG nova.virt.hardware [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 700.195885] env[62522]: DEBUG nova.virt.hardware [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 700.196070] env[62522]: DEBUG nova.virt.hardware [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 700.197045] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cdcd313-a2d9-4e36-835a-7789873c3b71 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.205686] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e470d6d-7073-4f9e-beff-46ed2524b098 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.293144] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.145s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 700.295404] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.213s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 700.296158] env[62522]: DEBUG nova.objects.instance [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lazy-loading 'resources' on 
Instance uuid 74b6ae10-a595-4139-8eda-38fe1aa298cf {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 700.320023] env[62522]: INFO nova.scheduler.client.report [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Deleted allocations for instance 84ad5317-344d-44c1-9318-fa1574321296 [ 700.398300] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 700.412742] env[62522]: DEBUG nova.compute.manager [req-319afa8c-55c0-4159-ba15-f7e80151e2ec req-cdca0f1c-626e-4462-9caf-14d9b659eec4 service nova] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Received event network-changed-c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 700.413009] env[62522]: DEBUG nova.compute.manager [req-319afa8c-55c0-4159-ba15-f7e80151e2ec req-cdca0f1c-626e-4462-9caf-14d9b659eec4 service nova] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Refreshing instance network info cache due to event network-changed-c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 700.413500] env[62522]: DEBUG oslo_concurrency.lockutils [req-319afa8c-55c0-4159-ba15-f7e80151e2ec req-cdca0f1c-626e-4462-9caf-14d9b659eec4 service nova] Acquiring lock "refresh_cache-253a2903-2601-4f0a-8882-e7510406f9d5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 700.413726] env[62522]: DEBUG oslo_concurrency.lockutils [req-319afa8c-55c0-4159-ba15-f7e80151e2ec req-cdca0f1c-626e-4462-9caf-14d9b659eec4 service nova] Acquired lock "refresh_cache-253a2903-2601-4f0a-8882-e7510406f9d5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.413914] env[62522]: DEBUG nova.network.neutron [req-319afa8c-55c0-4159-ba15-f7e80151e2ec req-cdca0f1c-626e-4462-9caf-14d9b659eec4 service nova] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Refreshing network info cache for port c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 700.526292] env[62522]: DEBUG nova.compute.manager [req-e654791f-c8cd-40a4-af9b-4352c753ce54 req-21b3bd2b-cc9c-4d10-9ba0-414cba0c3110 service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Received event network-changed-bb09cad6-a323-4801-8cb8-7e58b646a38e {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 700.526495] env[62522]: DEBUG nova.compute.manager [req-e654791f-c8cd-40a4-af9b-4352c753ce54 req-21b3bd2b-cc9c-4d10-9ba0-414cba0c3110 service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Refreshing instance network info cache due to event network-changed-bb09cad6-a323-4801-8cb8-7e58b646a38e. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 700.526768] env[62522]: DEBUG oslo_concurrency.lockutils [req-e654791f-c8cd-40a4-af9b-4352c753ce54 req-21b3bd2b-cc9c-4d10-9ba0-414cba0c3110 service nova] Acquiring lock "refresh_cache-19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 700.526837] env[62522]: DEBUG oslo_concurrency.lockutils [req-e654791f-c8cd-40a4-af9b-4352c753ce54 req-21b3bd2b-cc9c-4d10-9ba0-414cba0c3110 service nova] Acquired lock "refresh_cache-19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.526999] env[62522]: DEBUG nova.network.neutron [req-e654791f-c8cd-40a4-af9b-4352c753ce54 req-21b3bd2b-cc9c-4d10-9ba0-414cba0c3110 service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Refreshing network info cache for port bb09cad6-a323-4801-8cb8-7e58b646a38e {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 700.561682] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415174, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.608592] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415175, 'name': ReconfigVM_Task, 'duration_secs': 0.459273} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.608891] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Reconfigured VM instance instance-00000016 to attach disk [datastore2] c181ce48-9fe2-4400-9047-f8b5a7159dd3/c181ce48-9fe2-4400-9047-f8b5a7159dd3.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 700.609947] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7757e314-b3d4-457a-95b0-710d66f15288 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.616575] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 700.616575] env[62522]: value = "task-2415176" [ 700.616575] env[62522]: _type = "Task" [ 700.616575] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.624376] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415176, 'name': Rename_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.828986] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3a5fe7ff-520f-40b8-9d63-17a38184c5a6 tempest-ServersAdminTestJSON-2113526319 tempest-ServersAdminTestJSON-2113526319-project-member] Lock "84ad5317-344d-44c1-9318-fa1574321296" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.121s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.061937] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415174, 'name': CreateVM_Task, 'duration_secs': 0.73705} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.062399] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 701.063106] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 701.063280] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.064458] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 701.064828] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91429a1e-ec70-435b-b291-72eee33ef628 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.072763] env[62522]: DEBUG oslo_vmware.api [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Waiting for the task: (returnval){ [ 701.072763] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522cc48e-2692-8776-085f-ed8455462f03" [ 701.072763] env[62522]: _type = "Task" [ 701.072763] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.094054] env[62522]: DEBUG oslo_vmware.api [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522cc48e-2692-8776-085f-ed8455462f03, 'name': SearchDatastore_Task, 'duration_secs': 0.013631} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.094054] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 701.094054] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 701.097031] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 701.097301] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.097512] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 701.100313] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-341a32ea-429a-4a97-9a36-ad52ce5979d2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.114639] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 701.114822] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb 
tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 701.115571] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34e921e0-e74e-465d-83f2-e62bbf864379 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.124050] env[62522]: DEBUG oslo_vmware.api [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Waiting for the task: (returnval){ [ 701.124050] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5213ab0e-e231-9cd9-d718-fd60383194e0" [ 701.124050] env[62522]: _type = "Task" [ 701.124050] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.130054] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415176, 'name': Rename_Task, 'duration_secs': 0.440947} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.134970] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 701.135607] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-49f02094-eb9f-46b4-8192-8ee1d1e11267 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.142137] env[62522]: DEBUG oslo_vmware.api [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5213ab0e-e231-9cd9-d718-fd60383194e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.143423] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 701.143423] env[62522]: value = "task-2415177" [ 701.143423] env[62522]: _type = "Task" [ 701.143423] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.155284] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415177, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.341897] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98e2e577-b81d-4272-89c0-8c70b2bedc30 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.349183] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44720395-be41-47de-a04c-98af9e9e7aee {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.386078] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe51d2d0-bedf-4421-9a62-50e3a5de7acc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.399292] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfbce42e-f71b-4e83-a8ba-3cb47999c13c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.410529] env[62522]: DEBUG nova.compute.provider_tree [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 701.418997] env[62522]: DEBUG nova.network.neutron [req-319afa8c-55c0-4159-ba15-f7e80151e2ec req-cdca0f1c-626e-4462-9caf-14d9b659eec4 service nova] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Updated VIF entry in instance network info cache for port c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 701.421397] env[62522]: DEBUG nova.network.neutron [req-319afa8c-55c0-4159-ba15-f7e80151e2ec req-cdca0f1c-626e-4462-9caf-14d9b659eec4 service nova] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Updating instance_info_cache with network_info: [{"id": "c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9", "address": "fa:16:3e:b9:ef:f1", "network": {"id": "a2389999-ffd5-4ea6-a9b5-2ee7151a9810", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-398612397-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13784066a83f4c8a83f6d65e62a1e6df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc470ec3e-5a", "ovs_interfaceid": "c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.455687] env[62522]: DEBUG nova.network.neutron [req-e654791f-c8cd-40a4-af9b-4352c753ce54 req-21b3bd2b-cc9c-4d10-9ba0-414cba0c3110 service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Updated VIF entry in instance network info cache for port bb09cad6-a323-4801-8cb8-7e58b646a38e. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 701.456029] env[62522]: DEBUG nova.network.neutron [req-e654791f-c8cd-40a4-af9b-4352c753ce54 req-21b3bd2b-cc9c-4d10-9ba0-414cba0c3110 service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Updating instance_info_cache with network_info: [{"id": "bb09cad6-a323-4801-8cb8-7e58b646a38e", "address": "fa:16:3e:c2:3c:c7", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb09cad6-a3", "ovs_interfaceid": "bb09cad6-a323-4801-8cb8-7e58b646a38e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.490847] env[62522]: DEBUG nova.network.neutron [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Successfully updated port: 66de060c-1aa7-4119-b646-bd495f55add8 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 701.640717] env[62522]: DEBUG oslo_vmware.api [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5213ab0e-e231-9cd9-d718-fd60383194e0, 'name': SearchDatastore_Task, 'duration_secs': 0.030316} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.641593] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0148c52-8ef3-4610-b2c8-5bf797179784 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.650085] env[62522]: DEBUG oslo_vmware.api [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Waiting for the task: (returnval){ [ 701.650085] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5268bfbb-3e85-49ea-59bf-510b79705564" [ 701.650085] env[62522]: _type = "Task" [ 701.650085] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.653661] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415177, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.663044] env[62522]: DEBUG oslo_vmware.api [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5268bfbb-3e85-49ea-59bf-510b79705564, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.914080] env[62522]: DEBUG nova.scheduler.client.report [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 701.923120] env[62522]: DEBUG oslo_concurrency.lockutils [req-319afa8c-55c0-4159-ba15-f7e80151e2ec req-cdca0f1c-626e-4462-9caf-14d9b659eec4 service nova] Releasing lock "refresh_cache-253a2903-2601-4f0a-8882-e7510406f9d5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 701.961668] env[62522]: DEBUG oslo_concurrency.lockutils [req-e654791f-c8cd-40a4-af9b-4352c753ce54 req-21b3bd2b-cc9c-4d10-9ba0-414cba0c3110 service nova] Releasing lock "refresh_cache-19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 701.996729] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Acquiring lock "refresh_cache-17e1557d-e4cf-45b0-84da-4cbcffe31fb6" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 701.996729] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Acquired lock "refresh_cache-17e1557d-e4cf-45b0-84da-4cbcffe31fb6" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.996729] env[62522]: DEBUG nova.network.neutron [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 702.155788] 
env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415177, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.165735] env[62522]: DEBUG oslo_vmware.api [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5268bfbb-3e85-49ea-59bf-510b79705564, 'name': SearchDatastore_Task, 'duration_secs': 0.025318} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.166293] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 702.166701] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 253a2903-2601-4f0a-8882-e7510406f9d5/253a2903-2601-4f0a-8882-e7510406f9d5.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 702.168908] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-77f79002-3736-4191-8920-14268f20cbd2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.177041] env[62522]: DEBUG oslo_vmware.api [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Waiting for the task: (returnval){ [ 702.177041] env[62522]: value = "task-2415178" [ 702.177041] env[62522]: _type = "Task" [ 702.177041] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.183536] env[62522]: DEBUG oslo_vmware.api [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Task: {'id': task-2415178, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.420568] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.125s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.425349] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 26.122s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.425662] env[62522]: DEBUG nova.objects.instance [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62522) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 702.461325] env[62522]: INFO nova.scheduler.client.report [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Deleted allocations for instance 74b6ae10-a595-4139-8eda-38fe1aa298cf [ 702.581331] env[62522]: DEBUG nova.network.neutron [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 702.659588] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415177, 'name': PowerOnVM_Task} progress is 91%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.690043] env[62522]: DEBUG oslo_vmware.api [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Task: {'id': task-2415178, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497472} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.690043] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 253a2903-2601-4f0a-8882-e7510406f9d5/253a2903-2601-4f0a-8882-e7510406f9d5.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 702.690043] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 702.691390] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1b7fd830-341b-4197-810e-85e6c0ae5772 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.701527] env[62522]: DEBUG oslo_vmware.api [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Waiting for the task: (returnval){ [ 702.701527] env[62522]: value = "task-2415179" [ 702.701527] env[62522]: _type = "Task" [ 702.701527] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.719494] env[62522]: DEBUG oslo_vmware.api [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Task: {'id': task-2415179, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.745165] env[62522]: DEBUG nova.compute.manager [req-abd90c76-78d1-4360-a045-f281689a1ca4 req-35178aed-a102-439a-81d5-576b83eae012 service nova] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Received event network-vif-plugged-66de060c-1aa7-4119-b646-bd495f55add8 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 702.745502] env[62522]: DEBUG oslo_concurrency.lockutils [req-abd90c76-78d1-4360-a045-f281689a1ca4 req-35178aed-a102-439a-81d5-576b83eae012 service nova] Acquiring lock "17e1557d-e4cf-45b0-84da-4cbcffe31fb6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 702.745977] env[62522]: DEBUG oslo_concurrency.lockutils [req-abd90c76-78d1-4360-a045-f281689a1ca4 req-35178aed-a102-439a-81d5-576b83eae012 service nova] Lock "17e1557d-e4cf-45b0-84da-4cbcffe31fb6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.747043] env[62522]: DEBUG oslo_concurrency.lockutils [req-abd90c76-78d1-4360-a045-f281689a1ca4 req-35178aed-a102-439a-81d5-576b83eae012 service nova] Lock "17e1557d-e4cf-45b0-84da-4cbcffe31fb6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.747506] env[62522]: DEBUG nova.compute.manager [req-abd90c76-78d1-4360-a045-f281689a1ca4 req-35178aed-a102-439a-81d5-576b83eae012 service nova] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] No waiting events found dispatching network-vif-plugged-66de060c-1aa7-4119-b646-bd495f55add8 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 702.748048] env[62522]: WARNING nova.compute.manager [req-abd90c76-78d1-4360-a045-f281689a1ca4 req-35178aed-a102-439a-81d5-576b83eae012 service nova] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Received unexpected event network-vif-plugged-66de060c-1aa7-4119-b646-bd495f55add8 for instance with vm_state building and task_state spawning. [ 702.748461] env[62522]: DEBUG nova.compute.manager [req-abd90c76-78d1-4360-a045-f281689a1ca4 req-35178aed-a102-439a-81d5-576b83eae012 service nova] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Received event network-changed-66de060c-1aa7-4119-b646-bd495f55add8 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 702.748883] env[62522]: DEBUG nova.compute.manager [req-abd90c76-78d1-4360-a045-f281689a1ca4 req-35178aed-a102-439a-81d5-576b83eae012 service nova] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Refreshing instance network info cache due to event network-changed-66de060c-1aa7-4119-b646-bd495f55add8. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 702.749306] env[62522]: DEBUG oslo_concurrency.lockutils [req-abd90c76-78d1-4360-a045-f281689a1ca4 req-35178aed-a102-439a-81d5-576b83eae012 service nova] Acquiring lock "refresh_cache-17e1557d-e4cf-45b0-84da-4cbcffe31fb6" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 702.958473] env[62522]: DEBUG nova.network.neutron [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Updating instance_info_cache with network_info: [{"id": "66de060c-1aa7-4119-b646-bd495f55add8", "address": "fa:16:3e:b8:13:30", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66de060c-1a", "ovs_interfaceid": "66de060c-1aa7-4119-b646-bd495f55add8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.971392] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35285492-c3bf-4520-82d2-d2238b5e482c tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "74b6ae10-a595-4139-8eda-38fe1aa298cf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.530s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.155993] env[62522]: DEBUG oslo_vmware.api [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415177, 'name': PowerOnVM_Task, 'duration_secs': 1.880743} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.158754] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 703.160359] env[62522]: INFO nova.compute.manager [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Took 9.42 seconds to spawn the instance on the hypervisor. 
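The SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task and PowerOnVM_Task sequences above all follow the same oslo.vmware call-and-poll pattern: the driver invokes a SOAP method, gets back a Task managed-object reference, and wait_for_task() polls it (the "_poll_task ... progress is N%" lines) until it completes successfully. The following is a minimal standalone sketch of that pattern, assuming oslo.vmware's VMwareAPISession interface; the connection values, vm_ref and the power_on helper are placeholders for illustration, not values taken from this log.

    # Sketch only: the invoke/poll pattern behind the records above.
    from oslo_vmware import api as vmware_api

    # Placeholder credentials; a real deployment reads these from nova.conf.
    session = vmware_api.VMwareAPISession(
        'vc.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    def power_on(session, vm_ref):
        # vm_ref is a VirtualMachine managed-object reference obtained elsewhere.
        # invoke_api() issues the SOAP call ("Invoking VirtualMachine.PowerOnVM_Task"),
        # and wait_for_task() polls the returned task, logging progress until it is done.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)

The per-datastore locks taken around the cached image vmdk either side of the copy come from oslo.concurrency's lockutils, which is also what emits the "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" DEBUG lines throughout this trace.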
[ 703.160359] env[62522]: DEBUG nova.compute.manager [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 703.160359] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860674da-8a6d-4d78-a1a6-69308388f910 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.212494] env[62522]: DEBUG oslo_vmware.api [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Task: {'id': task-2415179, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.230462} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.212773] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 703.213601] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-490f5c9b-df1b-491f-b5fb-ba46a91f2004 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.243038] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] 253a2903-2601-4f0a-8882-e7510406f9d5/253a2903-2601-4f0a-8882-e7510406f9d5.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 703.245243] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4daac4ea-79e5-45d2-9a2d-4350668f7bcc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.265980] env[62522]: DEBUG oslo_vmware.api [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Waiting for the task: (returnval){ [ 703.265980] env[62522]: value = "task-2415180" [ 703.265980] env[62522]: _type = "Task" [ 703.265980] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.274940] env[62522]: DEBUG oslo_vmware.api [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Task: {'id': task-2415180, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.441639] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5bbdb465-d2c3-45bf-87be-be6bf023eb76 tempest-ServersAdmin275Test-1236548166 tempest-ServersAdmin275Test-1236548166-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.442842] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.917s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.443087] env[62522]: DEBUG nova.objects.instance [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Lazy-loading 'resources' on Instance uuid a804f755-58b2-4350-8726-4e82f60afcdc {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 703.461391] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Releasing lock "refresh_cache-17e1557d-e4cf-45b0-84da-4cbcffe31fb6" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 703.461725] env[62522]: DEBUG nova.compute.manager [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Instance network_info: |[{"id": "66de060c-1aa7-4119-b646-bd495f55add8", "address": "fa:16:3e:b8:13:30", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66de060c-1a", "ovs_interfaceid": "66de060c-1aa7-4119-b646-bd495f55add8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 703.462355] env[62522]: DEBUG oslo_concurrency.lockutils [req-abd90c76-78d1-4360-a045-f281689a1ca4 req-35178aed-a102-439a-81d5-576b83eae012 service nova] Acquired lock "refresh_cache-17e1557d-e4cf-45b0-84da-4cbcffe31fb6" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
703.462589] env[62522]: DEBUG nova.network.neutron [req-abd90c76-78d1-4360-a045-f281689a1ca4 req-35178aed-a102-439a-81d5-576b83eae012 service nova] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Refreshing network info cache for port 66de060c-1aa7-4119-b646-bd495f55add8 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 703.463700] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:13:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f6d1427-d86b-4371-9172-50e4bb0eb1cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '66de060c-1aa7-4119-b646-bd495f55add8', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 703.471403] env[62522]: DEBUG oslo.service.loopingcall [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 703.472831] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 703.473184] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-45ea725f-9d43-4ed8-9d75-483f04c62584 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.494997] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 703.494997] env[62522]: value = "task-2415181" [ 703.494997] env[62522]: _type = "Task" [ 703.494997] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.506449] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415181, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.684122] env[62522]: INFO nova.compute.manager [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Took 39.12 seconds to build instance. [ 703.776940] env[62522]: DEBUG oslo_vmware.api [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Task: {'id': task-2415180, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.007481] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415181, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.188621] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1c8793cd-a9c3-436b-8d0c-e153e274b142 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "c181ce48-9fe2-4400-9047-f8b5a7159dd3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.714s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.280984] env[62522]: DEBUG oslo_vmware.api [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Task: {'id': task-2415180, 'name': ReconfigVM_Task, 'duration_secs': 0.688526} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.281388] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Reconfigured VM instance instance-00000017 to attach disk [datastore1] 253a2903-2601-4f0a-8882-e7510406f9d5/253a2903-2601-4f0a-8882-e7510406f9d5.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 704.282187] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9990ebdc-5c53-4a5e-be2a-8602a3243992 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.289474] env[62522]: DEBUG oslo_vmware.api [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Waiting for the task: (returnval){ [ 704.289474] env[62522]: value = "task-2415182" [ 704.289474] env[62522]: _type = "Task" [ 704.289474] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.297965] env[62522]: DEBUG oslo_vmware.api [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Task: {'id': task-2415182, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.322154] env[62522]: DEBUG nova.network.neutron [req-abd90c76-78d1-4360-a045-f281689a1ca4 req-35178aed-a102-439a-81d5-576b83eae012 service nova] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Updated VIF entry in instance network info cache for port 66de060c-1aa7-4119-b646-bd495f55add8. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 704.322154] env[62522]: DEBUG nova.network.neutron [req-abd90c76-78d1-4360-a045-f281689a1ca4 req-35178aed-a102-439a-81d5-576b83eae012 service nova] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Updating instance_info_cache with network_info: [{"id": "66de060c-1aa7-4119-b646-bd495f55add8", "address": "fa:16:3e:b8:13:30", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66de060c-1a", "ovs_interfaceid": "66de060c-1aa7-4119-b646-bd495f55add8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.511383] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415181, 'name': CreateVM_Task, 'duration_secs': 0.709439} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.512699] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 704.512699] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 704.512699] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.512950] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 704.513205] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-634eecbf-ae25-4d95-aadd-0284fb7f5ba5 {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.518169] env[62522]: DEBUG oslo_vmware.api [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Waiting for the task: (returnval){ [ 704.518169] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f33419-5251-14ce-0918-b04b3e84830c" [ 704.518169] env[62522]: _type = "Task" [ 704.518169] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.530019] env[62522]: DEBUG oslo_vmware.api [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f33419-5251-14ce-0918-b04b3e84830c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.530019] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13853ef4-debe-4004-b38f-59c31288828d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.536921] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c2fcfd-b67e-4c76-804f-7651d68b95d7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.572347] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ecbec0-12a0-44aa-882f-04f96b71bc38 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.580436] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b08f92b0-93a5-4164-9026-e2842f81324a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.594520] env[62522]: DEBUG nova.compute.provider_tree [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 704.692503] env[62522]: DEBUG nova.compute.manager [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 704.802155] env[62522]: DEBUG oslo_vmware.api [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Task: {'id': task-2415182, 'name': Rename_Task, 'duration_secs': 0.252861} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.802525] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 704.802801] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-50c81526-cfff-45ca-994d-cd3b9628a055 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.810846] env[62522]: DEBUG oslo_vmware.api [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Waiting for the task: (returnval){ [ 704.810846] env[62522]: value = "task-2415183" [ 704.810846] env[62522]: _type = "Task" [ 704.810846] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.821990] env[62522]: DEBUG oslo_vmware.api [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Task: {'id': task-2415183, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.825687] env[62522]: DEBUG oslo_concurrency.lockutils [req-abd90c76-78d1-4360-a045-f281689a1ca4 req-35178aed-a102-439a-81d5-576b83eae012 service nova] Releasing lock "refresh_cache-17e1557d-e4cf-45b0-84da-4cbcffe31fb6" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 705.029732] env[62522]: DEBUG oslo_vmware.api [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f33419-5251-14ce-0918-b04b3e84830c, 'name': SearchDatastore_Task, 'duration_secs': 0.019932} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.030099] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 705.030352] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 705.030587] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 705.030737] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.030939] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 705.031229] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3e8b5ead-74a1-462c-a1f7-de0d267fce00 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.040861] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 705.041059] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 705.041811] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb17febd-3245-429b-9d5c-ef063469f30d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.047056] env[62522]: DEBUG oslo_vmware.api [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Waiting for the task: (returnval){ [ 705.047056] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520ded1e-62fd-0c95-71f3-d803339e961c" [ 705.047056] env[62522]: _type = "Task" [ 705.047056] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.054928] env[62522]: DEBUG oslo_vmware.api [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520ded1e-62fd-0c95-71f3-d803339e961c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.098279] env[62522]: DEBUG nova.scheduler.client.report [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 705.219066] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 705.322173] env[62522]: DEBUG oslo_vmware.api [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Task: {'id': task-2415183, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.430550] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquiring lock "d68b472d-2139-4e2d-bb28-7e45d80904cb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 705.430550] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "d68b472d-2139-4e2d-bb28-7e45d80904cb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 705.561036] env[62522]: DEBUG oslo_vmware.api [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520ded1e-62fd-0c95-71f3-d803339e961c, 'name': SearchDatastore_Task, 'duration_secs': 0.015389} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.561036] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-482b1b9d-07a3-4b68-aec8-fc8c0da6edda {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.564497] env[62522]: DEBUG oslo_vmware.api [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Waiting for the task: (returnval){ [ 705.564497] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fe2e30-89db-8d9f-d09b-8d62675674fd" [ 705.564497] env[62522]: _type = "Task" [ 705.564497] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.573522] env[62522]: DEBUG oslo_vmware.api [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fe2e30-89db-8d9f-d09b-8d62675674fd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.604331] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.161s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 705.609929] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.070s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 705.615534] env[62522]: INFO nova.compute.claims [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 705.641751] env[62522]: INFO nova.scheduler.client.report [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Deleted allocations for instance a804f755-58b2-4350-8726-4e82f60afcdc [ 705.822775] env[62522]: DEBUG oslo_vmware.api [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Task: {'id': task-2415183, 'name': PowerOnVM_Task, 'duration_secs': 0.701033} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.823067] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 705.823336] env[62522]: INFO nova.compute.manager [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Took 9.29 seconds to spawn the instance on the hypervisor. 
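The "Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 ..." records carry the placement inventory the resource tracker reports while it holds the compute_resources lock. As a rough standalone illustration (not Nova code), the schedulable capacity placement derives for each resource class is (total - reserved) * allocation_ratio, with max_unit only capping how much a single allocation may consume; the helper name below is illustrative.

    # Inventory values copied from the log records above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def schedulable(inv):
        # Effective capacity per resource class: (total - reserved) * allocation_ratio.
        return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
                for rc, v in inv.items()}

    print(schedulable(inventory))
    # {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}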
[ 705.823527] env[62522]: DEBUG nova.compute.manager [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 705.824262] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-759649f4-e615-4957-ad54-be4c01c5ad18 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.075606] env[62522]: DEBUG oslo_vmware.api [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fe2e30-89db-8d9f-d09b-8d62675674fd, 'name': SearchDatastore_Task, 'duration_secs': 0.010301} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.075913] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 706.076193] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 17e1557d-e4cf-45b0-84da-4cbcffe31fb6/17e1557d-e4cf-45b0-84da-4cbcffe31fb6.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 706.076464] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-55a9d544-a0eb-4c65-af69-06ea445824a7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.083848] env[62522]: DEBUG oslo_vmware.api [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Waiting for the task: (returnval){ [ 706.083848] env[62522]: value = "task-2415184" [ 706.083848] env[62522]: _type = "Task" [ 706.083848] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.091870] env[62522]: DEBUG oslo_vmware.api [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2415184, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.153032] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c0c5ad85-764d-4621-98f8-09c04159c31e tempest-InstanceActionsTestJSON-1144647943 tempest-InstanceActionsTestJSON-1144647943-project-member] Lock "a804f755-58b2-4350-8726-4e82f60afcdc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.250s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 706.347507] env[62522]: INFO nova.compute.manager [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Took 41.71 seconds to build instance. [ 706.594822] env[62522]: DEBUG oslo_vmware.api [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2415184, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.850722] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0ae2e33a-4e1e-44d2-a69e-9042b823aceb tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Lock "253a2903-2601-4f0a-8882-e7510406f9d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.833s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 707.099199] env[62522]: DEBUG oslo_vmware.api [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2415184, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.556024} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.100404] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 17e1557d-e4cf-45b0-84da-4cbcffe31fb6/17e1557d-e4cf-45b0-84da-4cbcffe31fb6.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 707.100904] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 707.101246] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-971607ae-ebff-4fdf-9eb7-9bc2afc9c3ca {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.109129] env[62522]: DEBUG oslo_vmware.api [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Waiting for the task: (returnval){ [ 707.109129] env[62522]: value = "task-2415185" [ 707.109129] env[62522]: _type = "Task" [ 707.109129] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.111552] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8360032a-7227-4865-906f-61d8c4614769 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.126077] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95017771-2697-4562-ae50-9ab443ff6185 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.129559] env[62522]: DEBUG oslo_vmware.api [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2415185, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.162109] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d7cf5c1-11da-4ae1-9d52-66558167d442 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.175023] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d33a263-5dd7-4e7c-a111-b323fdefc14c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.187121] env[62522]: DEBUG nova.compute.provider_tree [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 707.354935] env[62522]: DEBUG nova.compute.manager [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 707.624047] env[62522]: DEBUG oslo_vmware.api [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2415185, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.184792} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.624047] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 707.624047] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5dbb8bb-4d41-4ab0-9451-f9c26bb592a7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.646115] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] 17e1557d-e4cf-45b0-84da-4cbcffe31fb6/17e1557d-e4cf-45b0-84da-4cbcffe31fb6.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 707.646396] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abf6f8c8-c219-40e4-8a19-b12bfdf1ce8c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.666609] env[62522]: DEBUG oslo_vmware.api [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Waiting for the task: (returnval){ [ 707.666609] env[62522]: value = 
"task-2415186" [ 707.666609] env[62522]: _type = "Task" [ 707.666609] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.674138] env[62522]: DEBUG oslo_vmware.api [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2415186, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.689164] env[62522]: DEBUG nova.scheduler.client.report [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 707.880332] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.179914] env[62522]: DEBUG oslo_vmware.api [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2415186, 'name': ReconfigVM_Task, 'duration_secs': 0.355783} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.180304] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Reconfigured VM instance instance-00000018 to attach disk [datastore1] 17e1557d-e4cf-45b0-84da-4cbcffe31fb6/17e1557d-e4cf-45b0-84da-4cbcffe31fb6.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 708.180971] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a855ba7a-203f-4cc4-b243-9110f485f093 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.194868] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.585s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.195652] env[62522]: DEBUG nova.compute.manager [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 708.198021] env[62522]: DEBUG oslo_vmware.api [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Waiting for the task: (returnval){ [ 708.198021] env[62522]: value = "task-2415187" [ 708.198021] env[62522]: _type = "Task" [ 708.198021] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.198284] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.007s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.198524] env[62522]: DEBUG nova.objects.instance [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Lazy-loading 'resources' on Instance uuid 9a098809-cc26-4210-b09e-b7825c406294 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 708.209014] env[62522]: DEBUG oslo_vmware.api [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2415187, 'name': Rename_Task} progress is 10%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.705824] env[62522]: DEBUG nova.compute.utils [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 708.716295] env[62522]: DEBUG nova.compute.manager [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 708.716295] env[62522]: DEBUG nova.network.neutron [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 708.723479] env[62522]: DEBUG oslo_vmware.api [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2415187, 'name': Rename_Task, 'duration_secs': 0.142303} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.723741] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 708.723978] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b3736187-3d4f-4498-85e9-83b2ba51ac55 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.731056] env[62522]: DEBUG oslo_vmware.api [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Waiting for the task: (returnval){ [ 708.731056] env[62522]: value = "task-2415188" [ 708.731056] env[62522]: _type = "Task" [ 708.731056] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.740025] env[62522]: DEBUG oslo_vmware.api [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2415188, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.803659] env[62522]: DEBUG nova.policy [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fb193a7b00704d0d97429f6efc17ce98', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ec421e0535f04c2ba17759e8342e1897', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 709.218652] env[62522]: DEBUG nova.compute.manager [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 709.246829] env[62522]: DEBUG oslo_vmware.api [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2415188, 'name': PowerOnVM_Task, 'duration_secs': 0.471697} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.252014] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 709.252014] env[62522]: INFO nova.compute.manager [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Took 9.08 seconds to spawn the instance on the hypervisor. 
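The PowerOnVM_Task records just above (task-2415188: "Waiting for the task", "progress is 0%", then "completed successfully" with duration_secs 0.471697) show the invoke-then-poll pattern behind the wait_for_task / _poll_task lines throughout this log. Below is a minimal, self-contained sketch of that polling loop only; fetch_task_info and the fake progress sequence are illustrative stand-ins, not the oslo.vmware or Nova code referenced in the log, and the sketch assumes only that oslo.service is importable.

import itertools

from oslo_service import loopingcall

# Illustrative progress sequence standing in for a vCenter Task object:
# running at 0%, running at 77%, then success (compare task-2415184 above).
_FAKE_TASK_INFO = itertools.chain(
    [('running', 0), ('running', 77)],
    itertools.repeat(('success', 100)))

def fetch_task_info(task_ref):
    # Hypothetical stand-in for the PropertyCollector reads of the task's
    # "info" property seen in the log; returns (state, progress).
    return next(_FAKE_TASK_INFO)

def wait_for_task(task_ref, interval=0.5):
    """Poll task_ref until it leaves the running state, mirroring the
    'progress is N%.' then 'completed successfully.' records."""
    def _poll():
        state, progress = fetch_task_info(task_ref)
        if state == 'running':
            print("Task %s progress is %d%%." % (task_ref, progress))
            return  # keep polling on the next interval
        if state == 'success':
            raise loopingcall.LoopingCallDone(state)  # stops the loop
        raise RuntimeError("Task %s ended in state %s" % (task_ref, state))

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=interval).wait()

# wait_for_task('task-2415188') prints two progress lines and returns 'success'.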
[ 709.252014] env[62522]: DEBUG nova.compute.manager [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 709.252014] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0eb2c9c-813e-4a62-8520-175434c464fc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.285277] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe32d63-38a6-46d0-b7fa-8c36c9a5ddba {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.297491] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab137513-0f2e-4d60-ba70-c0124cb0afc1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.332717] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90839e85-4ebb-4c11-a58e-92cc83440453 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.341161] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a96195b-2c87-4571-9ac7-ff83d6c63489 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.358641] env[62522]: DEBUG nova.compute.provider_tree [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 709.361237] env[62522]: DEBUG nova.network.neutron [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Successfully created port: ca4e7776-76bf-40fc-ac2a-ac8917ca2978 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 709.581644] env[62522]: DEBUG nova.compute.manager [req-0ed43e5a-ab06-459c-a74b-0fbbca7ebbdb req-7a11c727-ea2c-45de-946e-e004dc6c81de service nova] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Received event network-changed-c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 709.581827] env[62522]: DEBUG nova.compute.manager [req-0ed43e5a-ab06-459c-a74b-0fbbca7ebbdb req-7a11c727-ea2c-45de-946e-e004dc6c81de service nova] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Refreshing instance network info cache due to event network-changed-c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 709.582274] env[62522]: DEBUG oslo_concurrency.lockutils [req-0ed43e5a-ab06-459c-a74b-0fbbca7ebbdb req-7a11c727-ea2c-45de-946e-e004dc6c81de service nova] Acquiring lock "refresh_cache-253a2903-2601-4f0a-8882-e7510406f9d5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.582490] env[62522]: DEBUG oslo_concurrency.lockutils [req-0ed43e5a-ab06-459c-a74b-0fbbca7ebbdb req-7a11c727-ea2c-45de-946e-e004dc6c81de service nova] Acquired lock "refresh_cache-253a2903-2601-4f0a-8882-e7510406f9d5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.582654] env[62522]: DEBUG nova.network.neutron [req-0ed43e5a-ab06-459c-a74b-0fbbca7ebbdb req-7a11c727-ea2c-45de-946e-e004dc6c81de service nova] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Refreshing network info cache for port c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 709.777012] env[62522]: INFO nova.compute.manager [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Took 39.74 seconds to build instance. [ 709.865323] env[62522]: DEBUG nova.scheduler.client.report [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 710.234720] env[62522]: DEBUG nova.compute.manager [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 710.265062] env[62522]: DEBUG nova.virt.hardware [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 710.265319] env[62522]: DEBUG nova.virt.hardware [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 710.265472] env[62522]: DEBUG nova.virt.hardware [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 710.265647] env[62522]: DEBUG nova.virt.hardware [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 710.265785] env[62522]: DEBUG nova.virt.hardware [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 710.265926] env[62522]: DEBUG nova.virt.hardware [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 710.266140] env[62522]: DEBUG nova.virt.hardware [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 710.266320] env[62522]: DEBUG nova.virt.hardware [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 710.266512] env[62522]: DEBUG nova.virt.hardware [None 
req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 710.266682] env[62522]: DEBUG nova.virt.hardware [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 710.266854] env[62522]: DEBUG nova.virt.hardware [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 710.267765] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22d23344-27ee-422f-9abd-6271dad93979 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.276880] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb57f5c3-cf9c-48f1-90da-c18d8dfd1314 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.283201] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bb4b952f-5b93-4196-aa6d-25186e7c69f4 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Lock "17e1557d-e4cf-45b0-84da-4cbcffe31fb6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.447s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.372019] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.171s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.372019] env[62522]: DEBUG oslo_concurrency.lockutils [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.724s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.374878] env[62522]: INFO nova.compute.claims [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 710.401353] env[62522]: INFO nova.scheduler.client.report [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Deleted allocations for instance 9a098809-cc26-4210-b09e-b7825c406294 [ 710.429821] env[62522]: DEBUG nova.network.neutron 
[req-0ed43e5a-ab06-459c-a74b-0fbbca7ebbdb req-7a11c727-ea2c-45de-946e-e004dc6c81de service nova] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Updated VIF entry in instance network info cache for port c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 710.430945] env[62522]: DEBUG nova.network.neutron [req-0ed43e5a-ab06-459c-a74b-0fbbca7ebbdb req-7a11c727-ea2c-45de-946e-e004dc6c81de service nova] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Updating instance_info_cache with network_info: [{"id": "c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9", "address": "fa:16:3e:b9:ef:f1", "network": {"id": "a2389999-ffd5-4ea6-a9b5-2ee7151a9810", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-398612397-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13784066a83f4c8a83f6d65e62a1e6df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc470ec3e-5a", "ovs_interfaceid": "c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.795112] env[62522]: DEBUG nova.compute.manager [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 710.914311] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c35a86f5-7b6b-44ca-b672-5a2d0de671bf tempest-ServersAdmin275Test-1050466741 tempest-ServersAdmin275Test-1050466741-project-member] Lock "9a098809-cc26-4210-b09e-b7825c406294" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.986s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.935275] env[62522]: DEBUG oslo_concurrency.lockutils [req-0ed43e5a-ab06-459c-a74b-0fbbca7ebbdb req-7a11c727-ea2c-45de-946e-e004dc6c81de service nova] Releasing lock "refresh_cache-253a2903-2601-4f0a-8882-e7510406f9d5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 711.243504] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c908d95-9ef8-4a85-841a-1fadfd55da24 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.251871] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6f9a0480-b84a-4705-9cd5-d6b1da3f179b tempest-ServersAdminNegativeTestJSON-76554634 tempest-ServersAdminNegativeTestJSON-76554634-project-admin] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Suspending the VM {{(pid=62522) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 711.252643] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-1e5780ee-0bd9-4a00-b167-a831a1ea8610 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.261699] env[62522]: DEBUG oslo_vmware.api [None req-6f9a0480-b84a-4705-9cd5-d6b1da3f179b tempest-ServersAdminNegativeTestJSON-76554634 tempest-ServersAdminNegativeTestJSON-76554634-project-admin] Waiting for the task: (returnval){ [ 711.261699] env[62522]: value = "task-2415189" [ 711.261699] env[62522]: _type = "Task" [ 711.261699] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.273876] env[62522]: DEBUG oslo_vmware.api [None req-6f9a0480-b84a-4705-9cd5-d6b1da3f179b tempest-ServersAdminNegativeTestJSON-76554634 tempest-ServersAdminNegativeTestJSON-76554634-project-admin] Task: {'id': task-2415189, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.330989] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 711.412118] env[62522]: DEBUG nova.network.neutron [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Successfully updated port: ca4e7776-76bf-40fc-ac2a-ac8917ca2978 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 711.771251] env[62522]: DEBUG oslo_vmware.api [None req-6f9a0480-b84a-4705-9cd5-d6b1da3f179b tempest-ServersAdminNegativeTestJSON-76554634 tempest-ServersAdminNegativeTestJSON-76554634-project-admin] Task: {'id': task-2415189, 'name': SuspendVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.916911] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquiring lock "refresh_cache-cce5f0d4-364d-4295-a27d-44ca8585f803" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.916911] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquired lock "refresh_cache-cce5f0d4-364d-4295-a27d-44ca8585f803" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.916911] env[62522]: DEBUG nova.network.neutron [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 711.958184] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f81eb1a4-71cc-4469-b0f4-dc383dbe0553 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.966746] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ca5cd2c-1a37-46df-8450-a65da795a1c1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.005830] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8113f6-27fb-4242-8a11-3d9d903dd722 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.013781] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df464d39-ec9c-4129-bd1c-b248096adc12 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.028329] env[62522]: DEBUG nova.compute.provider_tree [None 
req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 712.283106] env[62522]: DEBUG oslo_vmware.api [None req-6f9a0480-b84a-4705-9cd5-d6b1da3f179b tempest-ServersAdminNegativeTestJSON-76554634 tempest-ServersAdminNegativeTestJSON-76554634-project-admin] Task: {'id': task-2415189, 'name': SuspendVM_Task, 'duration_secs': 0.668291} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.283623] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6f9a0480-b84a-4705-9cd5-d6b1da3f179b tempest-ServersAdminNegativeTestJSON-76554634 tempest-ServersAdminNegativeTestJSON-76554634-project-admin] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Suspended the VM {{(pid=62522) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 712.283623] env[62522]: DEBUG nova.compute.manager [None req-6f9a0480-b84a-4705-9cd5-d6b1da3f179b tempest-ServersAdminNegativeTestJSON-76554634 tempest-ServersAdminNegativeTestJSON-76554634-project-admin] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 712.284511] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93bf48d2-2061-4b68-9c72-8d39c932b6dc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.470716] env[62522]: DEBUG nova.network.neutron [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 712.522862] env[62522]: DEBUG nova.compute.manager [req-ea7173c4-ecb6-48be-ba0b-8b859bb7b655 req-fcd9fb3e-7755-4758-9797-eebc574d5768 service nova] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Received event network-vif-plugged-ca4e7776-76bf-40fc-ac2a-ac8917ca2978 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 712.522862] env[62522]: DEBUG oslo_concurrency.lockutils [req-ea7173c4-ecb6-48be-ba0b-8b859bb7b655 req-fcd9fb3e-7755-4758-9797-eebc574d5768 service nova] Acquiring lock "cce5f0d4-364d-4295-a27d-44ca8585f803-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.522862] env[62522]: DEBUG oslo_concurrency.lockutils [req-ea7173c4-ecb6-48be-ba0b-8b859bb7b655 req-fcd9fb3e-7755-4758-9797-eebc574d5768 service nova] Lock "cce5f0d4-364d-4295-a27d-44ca8585f803-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.522862] env[62522]: DEBUG oslo_concurrency.lockutils [req-ea7173c4-ecb6-48be-ba0b-8b859bb7b655 req-fcd9fb3e-7755-4758-9797-eebc574d5768 service nova] Lock "cce5f0d4-364d-4295-a27d-44ca8585f803-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 712.523320] env[62522]: DEBUG nova.compute.manager [req-ea7173c4-ecb6-48be-ba0b-8b859bb7b655 req-fcd9fb3e-7755-4758-9797-eebc574d5768 service nova] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] No waiting events found dispatching network-vif-plugged-ca4e7776-76bf-40fc-ac2a-ac8917ca2978 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 712.523320] env[62522]: WARNING nova.compute.manager [req-ea7173c4-ecb6-48be-ba0b-8b859bb7b655 req-fcd9fb3e-7755-4758-9797-eebc574d5768 service nova] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Received unexpected event network-vif-plugged-ca4e7776-76bf-40fc-ac2a-ac8917ca2978 for instance with vm_state building and task_state spawning. [ 712.523455] env[62522]: DEBUG nova.compute.manager [req-ea7173c4-ecb6-48be-ba0b-8b859bb7b655 req-fcd9fb3e-7755-4758-9797-eebc574d5768 service nova] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Received event network-changed-ca4e7776-76bf-40fc-ac2a-ac8917ca2978 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 712.523596] env[62522]: DEBUG nova.compute.manager [req-ea7173c4-ecb6-48be-ba0b-8b859bb7b655 req-fcd9fb3e-7755-4758-9797-eebc574d5768 service nova] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Refreshing instance network info cache due to event network-changed-ca4e7776-76bf-40fc-ac2a-ac8917ca2978. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 712.523763] env[62522]: DEBUG oslo_concurrency.lockutils [req-ea7173c4-ecb6-48be-ba0b-8b859bb7b655 req-fcd9fb3e-7755-4758-9797-eebc574d5768 service nova] Acquiring lock "refresh_cache-cce5f0d4-364d-4295-a27d-44ca8585f803" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 712.533114] env[62522]: DEBUG nova.scheduler.client.report [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 712.765642] env[62522]: DEBUG nova.network.neutron [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Updating instance_info_cache with network_info: [{"id": "ca4e7776-76bf-40fc-ac2a-ac8917ca2978", "address": "fa:16:3e:fa:a4:67", "network": {"id": "27951c52-e28e-4c94-968c-c1b5ddd6b58b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1545103257-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec421e0535f04c2ba17759e8342e1897", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca4e7776-76", "ovs_interfaceid": "ca4e7776-76bf-40fc-ac2a-ac8917ca2978", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.042951] env[62522]: DEBUG oslo_concurrency.lockutils [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.670s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.042951] env[62522]: DEBUG nova.compute.manager [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 713.048683] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.332s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.048683] env[62522]: DEBUG nova.objects.instance [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Lazy-loading 'resources' on Instance uuid 4de70165-c28f-44b7-a01a-caa0787170b8 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 713.161425] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Acquiring lock "a185273e-cdaf-4967-832b-f75014b7b3f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 713.161722] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Lock "a185273e-cdaf-4967-832b-f75014b7b3f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.271118] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Releasing lock "refresh_cache-cce5f0d4-364d-4295-a27d-44ca8585f803" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 713.271492] env[62522]: DEBUG nova.compute.manager [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Instance network_info: |[{"id": "ca4e7776-76bf-40fc-ac2a-ac8917ca2978", "address": "fa:16:3e:fa:a4:67", "network": {"id": "27951c52-e28e-4c94-968c-c1b5ddd6b58b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1545103257-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec421e0535f04c2ba17759e8342e1897", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca4e7776-76", "ovs_interfaceid": "ca4e7776-76bf-40fc-ac2a-ac8917ca2978", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 713.271816] env[62522]: DEBUG oslo_concurrency.lockutils [req-ea7173c4-ecb6-48be-ba0b-8b859bb7b655 req-fcd9fb3e-7755-4758-9797-eebc574d5768 service nova] Acquired lock "refresh_cache-cce5f0d4-364d-4295-a27d-44ca8585f803" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.272013] env[62522]: DEBUG nova.network.neutron [req-ea7173c4-ecb6-48be-ba0b-8b859bb7b655 req-fcd9fb3e-7755-4758-9797-eebc574d5768 service nova] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Refreshing network info cache for port ca4e7776-76bf-40fc-ac2a-ac8917ca2978 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 713.273294] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:a4:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db1f7867-8524-469c-ab47-d2c9e2751d98', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ca4e7776-76bf-40fc-ac2a-ac8917ca2978', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 713.287325] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Creating folder: Project (ec421e0535f04c2ba17759e8342e1897). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 713.292205] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-22b20f4f-c01c-4ece-a73e-aac533261af3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.307060] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Created folder: Project (ec421e0535f04c2ba17759e8342e1897) in parent group-v489562. [ 713.307468] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Creating folder: Instances. Parent ref: group-v489633. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 713.307845] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-134da2a8-7dd5-46bf-beec-c0e8ba062418 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.319539] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Created folder: Instances in parent group-v489633. 
[ 713.319837] env[62522]: DEBUG oslo.service.loopingcall [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 713.320070] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 713.320284] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f0288a34-9a19-4a78-b2fa-df3133cd7a93 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.353461] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 713.353461] env[62522]: value = "task-2415192" [ 713.353461] env[62522]: _type = "Task" [ 713.353461] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.365861] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415192, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.550804] env[62522]: DEBUG nova.compute.utils [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 713.554034] env[62522]: DEBUG nova.compute.manager [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 713.554258] env[62522]: DEBUG nova.network.neutron [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 713.625381] env[62522]: DEBUG nova.policy [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a10b77f3502a4e51a5e599b823f08db2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '962664c996f24cf9ae192f79fae18ca4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 713.867789] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415192, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.872931] env[62522]: DEBUG nova.network.neutron [req-ea7173c4-ecb6-48be-ba0b-8b859bb7b655 req-fcd9fb3e-7755-4758-9797-eebc574d5768 service nova] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Updated VIF entry in instance network info cache for port ca4e7776-76bf-40fc-ac2a-ac8917ca2978. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 713.872931] env[62522]: DEBUG nova.network.neutron [req-ea7173c4-ecb6-48be-ba0b-8b859bb7b655 req-fcd9fb3e-7755-4758-9797-eebc574d5768 service nova] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Updating instance_info_cache with network_info: [{"id": "ca4e7776-76bf-40fc-ac2a-ac8917ca2978", "address": "fa:16:3e:fa:a4:67", "network": {"id": "27951c52-e28e-4c94-968c-c1b5ddd6b58b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1545103257-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec421e0535f04c2ba17759e8342e1897", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca4e7776-76", "ovs_interfaceid": "ca4e7776-76bf-40fc-ac2a-ac8917ca2978", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.057354] env[62522]: DEBUG nova.compute.manager [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 714.156037] env[62522]: DEBUG nova.network.neutron [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Successfully created port: 6683db6f-edf5-4273-b92a-cb688e7baa82 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 714.189013] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2325dcfb-90c2-4500-9dd1-74fb1a1cd59f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.198286] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe0b9f6-7099-43df-ba34-36415dba466c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.233363] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e261f358-aa1a-4b37-8f93-179174e2c59c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.243793] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d658f15-44cf-446e-84f1-e3e41f8e5d16 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.259308] env[62522]: DEBUG nova.compute.provider_tree [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 714.366858] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415192, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.378951] env[62522]: DEBUG oslo_concurrency.lockutils [req-ea7173c4-ecb6-48be-ba0b-8b859bb7b655 req-fcd9fb3e-7755-4758-9797-eebc574d5768 service nova] Releasing lock "refresh_cache-cce5f0d4-364d-4295-a27d-44ca8585f803" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 714.763395] env[62522]: DEBUG nova.scheduler.client.report [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 714.867658] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415192, 'name': CreateVM_Task, 'duration_secs': 1.378035} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.867658] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 714.868287] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 714.868628] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.868944] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 714.869285] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21e2ead1-9676-476e-9275-15d28b13c141 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.875014] env[62522]: DEBUG oslo_vmware.api [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Waiting for the task: (returnval){ [ 714.875014] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]523dd69e-57fe-1cd3-34c6-f3d3b5b03746" [ 714.875014] env[62522]: _type = "Task" [ 714.875014] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.884738] env[62522]: DEBUG oslo_vmware.api [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]523dd69e-57fe-1cd3-34c6-f3d3b5b03746, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.064987] env[62522]: DEBUG nova.compute.manager [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 715.096630] env[62522]: DEBUG nova.virt.hardware [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 715.096864] env[62522]: DEBUG nova.virt.hardware [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 715.097031] env[62522]: DEBUG nova.virt.hardware [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 715.097341] env[62522]: DEBUG nova.virt.hardware [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 715.097412] env[62522]: DEBUG nova.virt.hardware [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 715.097510] env[62522]: DEBUG nova.virt.hardware [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 715.097715] env[62522]: DEBUG nova.virt.hardware [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 715.097865] env[62522]: DEBUG nova.virt.hardware [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 715.098270] env[62522]: DEBUG nova.virt.hardware [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 715.098466] env[62522]: DEBUG nova.virt.hardware [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 715.098605] env[62522]: DEBUG nova.virt.hardware [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 715.099478] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce236a2-7380-401e-a807-d3dd3cbe282c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.107853] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ada91c7-db9f-4ce4-89d9-5523e6815c67 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.174110] env[62522]: DEBUG nova.compute.manager [req-55045b3e-01b1-47c6-9ea9-914af2805d0b req-9debcd74-d1ab-44f9-a3f0-8bf9e1a123d9 service nova] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Received event network-changed-c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 715.174333] env[62522]: DEBUG nova.compute.manager [req-55045b3e-01b1-47c6-9ea9-914af2805d0b req-9debcd74-d1ab-44f9-a3f0-8bf9e1a123d9 service nova] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Refreshing instance network info cache due to event network-changed-c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 715.174535] env[62522]: DEBUG oslo_concurrency.lockutils [req-55045b3e-01b1-47c6-9ea9-914af2805d0b req-9debcd74-d1ab-44f9-a3f0-8bf9e1a123d9 service nova] Acquiring lock "refresh_cache-253a2903-2601-4f0a-8882-e7510406f9d5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.174678] env[62522]: DEBUG oslo_concurrency.lockutils [req-55045b3e-01b1-47c6-9ea9-914af2805d0b req-9debcd74-d1ab-44f9-a3f0-8bf9e1a123d9 service nova] Acquired lock "refresh_cache-253a2903-2601-4f0a-8882-e7510406f9d5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.174838] env[62522]: DEBUG nova.network.neutron [req-55045b3e-01b1-47c6-9ea9-914af2805d0b req-9debcd74-d1ab-44f9-a3f0-8bf9e1a123d9 service nova] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Refreshing network info cache for port c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 715.271750] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.225s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.275431] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.134s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.276969] env[62522]: INFO nova.compute.claims [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 715.305017] env[62522]: INFO nova.scheduler.client.report [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Deleted allocations for instance 4de70165-c28f-44b7-a01a-caa0787170b8 [ 715.387591] env[62522]: DEBUG oslo_vmware.api [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]523dd69e-57fe-1cd3-34c6-f3d3b5b03746, 'name': SearchDatastore_Task, 'duration_secs': 0.01141} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.387896] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 715.388144] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 715.388372] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.388757] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.388757] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 715.388970] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-010ea71b-3a61-4cff-bd37-bd96664f9f61 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.398922] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 715.399140] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 715.399885] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0c6b614-e4df-4b95-bf84-61c2a788d643 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.405882] env[62522]: DEBUG oslo_vmware.api [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Waiting for the task: (returnval){ [ 715.405882] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d04ab1-7409-5675-960a-fd73ad24fec8" [ 715.405882] env[62522]: _type = "Task" [ 715.405882] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.414350] env[62522]: DEBUG oslo_vmware.api [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d04ab1-7409-5675-960a-fd73ad24fec8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.813154] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c6b73a30-0a83-4e92-8cf1-b4bdceb9be4d tempest-ServerAddressesNegativeTestJSON-1067668060 tempest-ServerAddressesNegativeTestJSON-1067668060-project-member] Lock "4de70165-c28f-44b7-a01a-caa0787170b8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.259s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.921544] env[62522]: DEBUG oslo_vmware.api [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d04ab1-7409-5675-960a-fd73ad24fec8, 'name': SearchDatastore_Task, 'duration_secs': 0.008721} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.922346] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d48ce56-c952-4810-91dc-e306e1cf5de8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.928012] env[62522]: DEBUG oslo_vmware.api [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Waiting for the task: (returnval){ [ 715.928012] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529248d4-8004-41d0-1125-bbd47c097546" [ 715.928012] env[62522]: _type = "Task" [ 715.928012] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.935867] env[62522]: DEBUG oslo_vmware.api [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529248d4-8004-41d0-1125-bbd47c097546, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.050134] env[62522]: DEBUG nova.network.neutron [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Successfully updated port: 6683db6f-edf5-4273-b92a-cb688e7baa82 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 716.097754] env[62522]: DEBUG nova.network.neutron [req-55045b3e-01b1-47c6-9ea9-914af2805d0b req-9debcd74-d1ab-44f9-a3f0-8bf9e1a123d9 service nova] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Updated VIF entry in instance network info cache for port c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 716.098133] env[62522]: DEBUG nova.network.neutron [req-55045b3e-01b1-47c6-9ea9-914af2805d0b req-9debcd74-d1ab-44f9-a3f0-8bf9e1a123d9 service nova] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Updating instance_info_cache with network_info: [{"id": "c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9", "address": "fa:16:3e:b9:ef:f1", "network": {"id": "a2389999-ffd5-4ea6-a9b5-2ee7151a9810", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-398612397-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13784066a83f4c8a83f6d65e62a1e6df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc470ec3e-5a", "ovs_interfaceid": "c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.356845] env[62522]: DEBUG nova.compute.manager [req-ef82c70c-97b3-4522-a725-94eff98739cd req-663c1f8d-b7b0-47ec-a47c-5b702cd72402 service nova] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Received event network-vif-plugged-6683db6f-edf5-4273-b92a-cb688e7baa82 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 716.357124] env[62522]: DEBUG oslo_concurrency.lockutils [req-ef82c70c-97b3-4522-a725-94eff98739cd req-663c1f8d-b7b0-47ec-a47c-5b702cd72402 service nova] Acquiring lock "cd69a052-369b-4809-baf0-a1aec44f4ab5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.357401] env[62522]: DEBUG oslo_concurrency.lockutils [req-ef82c70c-97b3-4522-a725-94eff98739cd req-663c1f8d-b7b0-47ec-a47c-5b702cd72402 service nova] Lock "cd69a052-369b-4809-baf0-a1aec44f4ab5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 716.357614] env[62522]: DEBUG oslo_concurrency.lockutils [req-ef82c70c-97b3-4522-a725-94eff98739cd req-663c1f8d-b7b0-47ec-a47c-5b702cd72402 service nova] Lock "cd69a052-369b-4809-baf0-a1aec44f4ab5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 716.357828] env[62522]: DEBUG nova.compute.manager [req-ef82c70c-97b3-4522-a725-94eff98739cd req-663c1f8d-b7b0-47ec-a47c-5b702cd72402 service nova] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] No waiting events found dispatching network-vif-plugged-6683db6f-edf5-4273-b92a-cb688e7baa82 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 716.361043] env[62522]: WARNING nova.compute.manager [req-ef82c70c-97b3-4522-a725-94eff98739cd req-663c1f8d-b7b0-47ec-a47c-5b702cd72402 service nova] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Received unexpected event network-vif-plugged-6683db6f-edf5-4273-b92a-cb688e7baa82 for instance with vm_state building and task_state spawning. [ 716.441081] env[62522]: DEBUG oslo_vmware.api [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529248d4-8004-41d0-1125-bbd47c097546, 'name': SearchDatastore_Task, 'duration_secs': 0.009522} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.441802] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 716.442199] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] cce5f0d4-364d-4295-a27d-44ca8585f803/cce5f0d4-364d-4295-a27d-44ca8585f803.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 716.443025] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e5ed25fc-93c2-48dc-af7a-a05e1d9674cf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.452141] env[62522]: DEBUG oslo_vmware.api [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Waiting for the task: (returnval){ [ 716.452141] env[62522]: value = "task-2415193" [ 716.452141] env[62522]: _type = "Task" [ 716.452141] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.458846] env[62522]: DEBUG oslo_vmware.api [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415193, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.552911] env[62522]: DEBUG oslo_concurrency.lockutils [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "refresh_cache-cd69a052-369b-4809-baf0-a1aec44f4ab5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.553099] env[62522]: DEBUG oslo_concurrency.lockutils [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquired lock "refresh_cache-cd69a052-369b-4809-baf0-a1aec44f4ab5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.553528] env[62522]: DEBUG nova.network.neutron [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 716.602346] env[62522]: DEBUG oslo_concurrency.lockutils [req-55045b3e-01b1-47c6-9ea9-914af2805d0b req-9debcd74-d1ab-44f9-a3f0-8bf9e1a123d9 service nova] Releasing lock "refresh_cache-253a2903-2601-4f0a-8882-e7510406f9d5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 716.817196] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "3c4c395c-0625-4569-990d-e2d4ad162c14" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.817463] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "3c4c395c-0625-4569-990d-e2d4ad162c14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 716.822850] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21883214-c294-4b5a-a16e-8c82b8bf7eb2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.840586] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed32c109-fc39-4774-bb6b-e80741b07e8c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.876394] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c466957b-a67d-453c-a74f-72715c72a6a6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.888694] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-465fd63e-2850-4780-ad62-b809773405c4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.910093] env[62522]: DEBUG nova.compute.provider_tree [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 716.962729] env[62522]: DEBUG oslo_vmware.api [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415193, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473753} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.963238] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] cce5f0d4-364d-4295-a27d-44ca8585f803/cce5f0d4-364d-4295-a27d-44ca8585f803.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 716.963460] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 716.963775] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0f5c4e60-673d-4b25-a18e-659a8088db2e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.971130] env[62522]: DEBUG oslo_vmware.api [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Waiting for the task: (returnval){ [ 716.971130] env[62522]: value = "task-2415194" [ 716.971130] env[62522]: _type = "Task" [ 716.971130] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.988875] env[62522]: DEBUG oslo_vmware.api [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415194, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.107992] env[62522]: DEBUG nova.network.neutron [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 717.294063] env[62522]: DEBUG nova.network.neutron [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Updating instance_info_cache with network_info: [{"id": "6683db6f-edf5-4273-b92a-cb688e7baa82", "address": "fa:16:3e:96:b2:84", "network": {"id": "5f1d73d1-ff9e-4081-87cf-8df6294f67c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-892212702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "962664c996f24cf9ae192f79fae18ca4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "419a5b3f-4c6f-4168-9def-746b4d8c5c24", "external-id": "nsx-vlan-transportzone-656", "segmentation_id": 656, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6683db6f-ed", "ovs_interfaceid": "6683db6f-edf5-4273-b92a-cb688e7baa82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.413839] env[62522]: DEBUG nova.scheduler.client.report [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 717.483243] env[62522]: DEBUG oslo_vmware.api [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415194, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062723} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.483243] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 717.483243] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef3d1f7c-1729-4635-beda-ff9dcd09cdd3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.504257] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] cce5f0d4-364d-4295-a27d-44ca8585f803/cce5f0d4-364d-4295-a27d-44ca8585f803.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 717.504580] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f96615cc-36c9-4397-a31a-2f481dc8e197 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.526017] env[62522]: DEBUG oslo_vmware.api [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Waiting for the task: (returnval){ [ 717.526017] env[62522]: value = "task-2415195" [ 717.526017] env[62522]: _type = "Task" [ 717.526017] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.534048] env[62522]: DEBUG oslo_vmware.api [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415195, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.539804] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Acquiring lock "504396d8-077d-4563-91b5-a7a6259eea27" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 717.540030] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Lock "504396d8-077d-4563-91b5-a7a6259eea27" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.658101] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Acquiring lock "253a2903-2601-4f0a-8882-e7510406f9d5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 717.658101] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Lock "253a2903-2601-4f0a-8882-e7510406f9d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.658101] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Acquiring lock "253a2903-2601-4f0a-8882-e7510406f9d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 717.658333] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Lock "253a2903-2601-4f0a-8882-e7510406f9d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.658333] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Lock "253a2903-2601-4f0a-8882-e7510406f9d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.661908] env[62522]: INFO 
nova.compute.manager [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Terminating instance [ 717.803175] env[62522]: DEBUG oslo_concurrency.lockutils [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Releasing lock "refresh_cache-cd69a052-369b-4809-baf0-a1aec44f4ab5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 717.804899] env[62522]: DEBUG nova.compute.manager [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Instance network_info: |[{"id": "6683db6f-edf5-4273-b92a-cb688e7baa82", "address": "fa:16:3e:96:b2:84", "network": {"id": "5f1d73d1-ff9e-4081-87cf-8df6294f67c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-892212702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "962664c996f24cf9ae192f79fae18ca4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "419a5b3f-4c6f-4168-9def-746b4d8c5c24", "external-id": "nsx-vlan-transportzone-656", "segmentation_id": 656, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6683db6f-ed", "ovs_interfaceid": "6683db6f-edf5-4273-b92a-cb688e7baa82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 717.805503] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:b2:84', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '419a5b3f-4c6f-4168-9def-746b4d8c5c24', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6683db6f-edf5-4273-b92a-cb688e7baa82', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 717.814114] env[62522]: DEBUG oslo.service.loopingcall [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 717.818014] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 717.818014] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6d902615-0ccc-4ca4-9430-28b8ad2a1eea {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.837103] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 717.837103] env[62522]: value = "task-2415196" [ 717.837103] env[62522]: _type = "Task" [ 717.837103] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.846887] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415196, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.921867] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.646s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.922173] env[62522]: DEBUG nova.compute.manager [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 717.929672] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.618s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.930414] env[62522]: INFO nova.compute.claims [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 718.035854] env[62522]: DEBUG oslo_vmware.api [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415195, 'name': ReconfigVM_Task, 'duration_secs': 0.275205} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.037135] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Reconfigured VM instance instance-00000019 to attach disk [datastore1] cce5f0d4-364d-4295-a27d-44ca8585f803/cce5f0d4-364d-4295-a27d-44ca8585f803.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 718.037135] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9e402ec1-ebfc-423a-ba96-a4dceb8bdc9e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.044738] env[62522]: DEBUG oslo_vmware.api [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Waiting for the task: (returnval){ [ 718.044738] env[62522]: value = "task-2415197" [ 718.044738] env[62522]: _type = "Task" [ 718.044738] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.054681] env[62522]: DEBUG oslo_vmware.api [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415197, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.169816] env[62522]: DEBUG nova.compute.manager [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 718.169816] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 718.169816] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a7fd1c1-c8dd-4523-b9c8-2b4f85d6cc5f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.182147] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 718.182396] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-afb4ad6c-12c1-4a00-b90e-052e892e24eb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.193138] env[62522]: DEBUG oslo_vmware.api [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Waiting for the task: (returnval){ [ 718.193138] env[62522]: value = "task-2415198" [ 718.193138] env[62522]: _type = "Task" [ 718.193138] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.204894] env[62522]: DEBUG oslo_vmware.api [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Task: {'id': task-2415198, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.347246] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415196, 'name': CreateVM_Task, 'duration_secs': 0.399213} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.347434] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 718.348100] env[62522]: DEBUG oslo_concurrency.lockutils [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.348270] env[62522]: DEBUG oslo_concurrency.lockutils [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.348633] env[62522]: DEBUG oslo_concurrency.lockutils [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 718.348900] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4de35637-a5ca-4abf-98da-03419fd5aba2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.353992] env[62522]: DEBUG oslo_vmware.api [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 718.353992] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e4f389-481b-0347-5881-c3f836ea5312" [ 718.353992] env[62522]: _type = "Task" [ 718.353992] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.363886] env[62522]: DEBUG oslo_vmware.api [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e4f389-481b-0347-5881-c3f836ea5312, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.435957] env[62522]: DEBUG nova.compute.utils [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 718.440597] env[62522]: DEBUG nova.compute.manager [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 718.441039] env[62522]: DEBUG nova.network.neutron [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 718.543683] env[62522]: DEBUG nova.policy [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '474f240ea0fe481fab058e413c4d4f95', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '36972e78027c4318ab5be02c371c4934', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 718.555995] env[62522]: DEBUG oslo_vmware.api [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415197, 'name': Rename_Task, 'duration_secs': 0.147327} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.556373] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 718.556525] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2d164490-f832-44ba-a27c-83a30f7db750 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.564162] env[62522]: DEBUG oslo_vmware.api [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Waiting for the task: (returnval){ [ 718.564162] env[62522]: value = "task-2415199" [ 718.564162] env[62522]: _type = "Task" [ 718.564162] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.573513] env[62522]: DEBUG oslo_vmware.api [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415199, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.635054] env[62522]: DEBUG nova.compute.manager [req-cacb9174-1062-4593-b619-135130e65d12 req-d422aa25-d0f0-4deb-beee-f5556e2d120f service nova] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Received event network-changed-6683db6f-edf5-4273-b92a-cb688e7baa82 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 718.635054] env[62522]: DEBUG nova.compute.manager [req-cacb9174-1062-4593-b619-135130e65d12 req-d422aa25-d0f0-4deb-beee-f5556e2d120f service nova] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Refreshing instance network info cache due to event network-changed-6683db6f-edf5-4273-b92a-cb688e7baa82. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 718.635054] env[62522]: DEBUG oslo_concurrency.lockutils [req-cacb9174-1062-4593-b619-135130e65d12 req-d422aa25-d0f0-4deb-beee-f5556e2d120f service nova] Acquiring lock "refresh_cache-cd69a052-369b-4809-baf0-a1aec44f4ab5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.635054] env[62522]: DEBUG oslo_concurrency.lockutils [req-cacb9174-1062-4593-b619-135130e65d12 req-d422aa25-d0f0-4deb-beee-f5556e2d120f service nova] Acquired lock "refresh_cache-cd69a052-369b-4809-baf0-a1aec44f4ab5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.635252] env[62522]: DEBUG nova.network.neutron [req-cacb9174-1062-4593-b619-135130e65d12 req-d422aa25-d0f0-4deb-beee-f5556e2d120f service nova] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Refreshing network info cache for port 6683db6f-edf5-4273-b92a-cb688e7baa82 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 718.703635] env[62522]: DEBUG oslo_vmware.api [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Task: {'id': task-2415198, 'name': PowerOffVM_Task, 'duration_secs': 0.219139} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.703911] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 718.704092] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 718.704347] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3cac760c-8275-4f2d-a9c8-1bf56d0512a5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.789445] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 718.789544] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 718.789738] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Deleting the datastore file [datastore1] 253a2903-2601-4f0a-8882-e7510406f9d5 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 718.790016] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be7404a1-0b3a-4e78-95c2-e0a21a150655 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.804957] env[62522]: DEBUG oslo_vmware.api [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Waiting for the task: (returnval){ [ 718.804957] env[62522]: value = "task-2415201" [ 718.804957] env[62522]: _type = "Task" [ 718.804957] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.815156] env[62522]: DEBUG oslo_vmware.api [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Task: {'id': task-2415201, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.866689] env[62522]: DEBUG oslo_vmware.api [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e4f389-481b-0347-5881-c3f836ea5312, 'name': SearchDatastore_Task, 'duration_secs': 0.013066} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.867124] env[62522]: DEBUG oslo_concurrency.lockutils [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.867402] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 718.867641] env[62522]: DEBUG oslo_concurrency.lockutils [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.867798] env[62522]: DEBUG oslo_concurrency.lockutils [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.868030] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 718.869113] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5dc248b-a2dd-4d96-b826-edc7cf127a90 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.876991] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 718.877310] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Folder [datastore1] devstack-image-cache_base 
created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 718.877955] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d66b4230-25ca-45c0-bc2c-ce9ccfaf19eb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.883353] env[62522]: DEBUG oslo_vmware.api [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 718.883353] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52128aa5-bc7d-f710-e2e5-04430d7e75de" [ 718.883353] env[62522]: _type = "Task" [ 718.883353] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.892372] env[62522]: DEBUG oslo_vmware.api [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52128aa5-bc7d-f710-e2e5-04430d7e75de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.942383] env[62522]: DEBUG nova.compute.manager [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 719.074947] env[62522]: DEBUG oslo_vmware.api [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415199, 'name': PowerOnVM_Task} progress is 90%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.241145] env[62522]: DEBUG nova.network.neutron [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Successfully created port: 284f1edc-1e35-4812-9696-c3ef34dfba09 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 719.313114] env[62522]: DEBUG oslo_vmware.api [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Task: {'id': task-2415201, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161557} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.313398] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 719.313596] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 719.313787] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 719.313992] env[62522]: INFO nova.compute.manager [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Took 1.15 seconds to destroy the instance on the hypervisor. [ 719.314239] env[62522]: DEBUG oslo.service.loopingcall [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 719.314433] env[62522]: DEBUG nova.compute.manager [-] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 719.314528] env[62522]: DEBUG nova.network.neutron [-] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 719.402900] env[62522]: DEBUG oslo_vmware.api [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52128aa5-bc7d-f710-e2e5-04430d7e75de, 'name': SearchDatastore_Task, 'duration_secs': 0.010964} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.405967] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbc29c19-d67b-4377-a2f0-65d9d5b26f60 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.411928] env[62522]: DEBUG oslo_vmware.api [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 719.411928] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52620e99-4e49-a1e1-f71f-292703a94952" [ 719.411928] env[62522]: _type = "Task" [ 719.411928] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.426895] env[62522]: DEBUG oslo_vmware.api [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52620e99-4e49-a1e1-f71f-292703a94952, 'name': SearchDatastore_Task, 'duration_secs': 0.009631} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.427242] env[62522]: DEBUG oslo_concurrency.lockutils [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.427590] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] cd69a052-369b-4809-baf0-a1aec44f4ab5/cd69a052-369b-4809-baf0-a1aec44f4ab5.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 719.427794] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8c3332fb-c26a-4b23-b308-efcc32462771 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.434682] env[62522]: DEBUG oslo_vmware.api [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 719.434682] env[62522]: value = "task-2415202" [ 719.434682] env[62522]: _type = "Task" [ 719.434682] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.442519] env[62522]: DEBUG oslo_vmware.api [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415202, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.456162] env[62522]: DEBUG nova.network.neutron [req-cacb9174-1062-4593-b619-135130e65d12 req-d422aa25-d0f0-4deb-beee-f5556e2d120f service nova] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Updated VIF entry in instance network info cache for port 6683db6f-edf5-4273-b92a-cb688e7baa82. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 719.456873] env[62522]: DEBUG nova.network.neutron [req-cacb9174-1062-4593-b619-135130e65d12 req-d422aa25-d0f0-4deb-beee-f5556e2d120f service nova] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Updating instance_info_cache with network_info: [{"id": "6683db6f-edf5-4273-b92a-cb688e7baa82", "address": "fa:16:3e:96:b2:84", "network": {"id": "5f1d73d1-ff9e-4081-87cf-8df6294f67c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-892212702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "962664c996f24cf9ae192f79fae18ca4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "419a5b3f-4c6f-4168-9def-746b4d8c5c24", "external-id": "nsx-vlan-transportzone-656", "segmentation_id": 656, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6683db6f-ed", "ovs_interfaceid": "6683db6f-edf5-4273-b92a-cb688e7baa82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.531981] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a57dd0-224d-45c3-969c-1a42cb3223c7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.540278] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da4dc866-6dd9-4840-b373-998881fe88e4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.591539] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2831887b-df34-4821-8960-538c623da0d0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.605214] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ced942-1628-4a46-a956-5d27e3a01375 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.611144] env[62522]: DEBUG oslo_vmware.api [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415199, 'name': PowerOnVM_Task, 'duration_secs': 0.646788} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.611295] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 719.611566] env[62522]: INFO nova.compute.manager [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Took 9.38 seconds to spawn the instance on the hypervisor. [ 719.611860] env[62522]: DEBUG nova.compute.manager [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 719.613820] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c2dda5-4090-4065-8782-92dc3d2f2ebd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.629225] env[62522]: DEBUG nova.compute.provider_tree [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 719.946247] env[62522]: DEBUG oslo_vmware.api [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415202, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.47944} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.946540] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] cd69a052-369b-4809-baf0-a1aec44f4ab5/cd69a052-369b-4809-baf0-a1aec44f4ab5.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 719.946764] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 719.947017] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d67b4cd1-676d-4083-8416-72d5dbb76b7d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.953740] env[62522]: DEBUG oslo_vmware.api [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 719.953740] env[62522]: value = "task-2415203" [ 719.953740] env[62522]: _type = "Task" [ 719.953740] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.963387] env[62522]: DEBUG nova.compute.manager [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 719.965483] env[62522]: DEBUG oslo_concurrency.lockutils [req-cacb9174-1062-4593-b619-135130e65d12 req-d422aa25-d0f0-4deb-beee-f5556e2d120f service nova] Releasing lock "refresh_cache-cd69a052-369b-4809-baf0-a1aec44f4ab5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.965967] env[62522]: DEBUG oslo_vmware.api [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415203, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.990420] env[62522]: DEBUG nova.virt.hardware [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 719.990992] env[62522]: DEBUG nova.virt.hardware [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 719.992457] env[62522]: DEBUG nova.virt.hardware [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 719.992457] env[62522]: DEBUG nova.virt.hardware [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 719.992457] env[62522]: DEBUG nova.virt.hardware [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 719.992457] env[62522]: DEBUG nova.virt.hardware [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 719.992680] env[62522]: DEBUG nova.virt.hardware [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 719.992805] env[62522]: DEBUG nova.virt.hardware [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 719.993266] env[62522]: DEBUG nova.virt.hardware [None 
req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 719.993266] env[62522]: DEBUG nova.virt.hardware [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 719.993458] env[62522]: DEBUG nova.virt.hardware [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 719.994700] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d06de8-4973-48a6-a1cc-143ce586dec4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.003894] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de40bcd-7e5d-4f19-a4ed-973493a9adb8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.125806] env[62522]: DEBUG nova.compute.manager [req-5cc37239-ca1a-4c9e-9658-41577f4b50dc req-3ee91659-bf3f-4191-8ad2-d6bd250cff31 service nova] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Received event network-vif-deleted-c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 720.126043] env[62522]: INFO nova.compute.manager [req-5cc37239-ca1a-4c9e-9658-41577f4b50dc req-3ee91659-bf3f-4191-8ad2-d6bd250cff31 service nova] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Neutron deleted interface c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9; detaching it from the instance and deleting it from the info cache [ 720.126232] env[62522]: DEBUG nova.network.neutron [req-5cc37239-ca1a-4c9e-9658-41577f4b50dc req-3ee91659-bf3f-4191-8ad2-d6bd250cff31 service nova] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.131652] env[62522]: DEBUG nova.scheduler.client.report [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 720.152844] env[62522]: INFO nova.compute.manager [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Took 
38.64 seconds to build instance. [ 720.381599] env[62522]: DEBUG nova.network.neutron [-] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.464849] env[62522]: DEBUG oslo_vmware.api [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415203, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063297} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.465127] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 720.465905] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f5b178-6ca4-4187-b7af-6681a40738a5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.488452] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] cd69a052-369b-4809-baf0-a1aec44f4ab5/cd69a052-369b-4809-baf0-a1aec44f4ab5.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 720.488772] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3753140a-0abb-48da-b276-cd585a6467c7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.507985] env[62522]: DEBUG oslo_vmware.api [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 720.507985] env[62522]: value = "task-2415204" [ 720.507985] env[62522]: _type = "Task" [ 720.507985] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.515611] env[62522]: DEBUG oslo_vmware.api [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415204, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.596924] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Acquiring lock "17e1557d-e4cf-45b0-84da-4cbcffe31fb6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 720.597278] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Lock "17e1557d-e4cf-45b0-84da-4cbcffe31fb6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 720.597496] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Acquiring lock "17e1557d-e4cf-45b0-84da-4cbcffe31fb6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 720.597701] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Lock "17e1557d-e4cf-45b0-84da-4cbcffe31fb6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 720.597875] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Lock "17e1557d-e4cf-45b0-84da-4cbcffe31fb6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 720.600074] env[62522]: INFO nova.compute.manager [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Terminating instance [ 720.628661] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ccd32481-2ba4-4950-b3ed-925de5f8e92f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.638326] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90b0f535-00ae-45b0-8fa0-cf0656401dc0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.649788] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.721s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 720.650304] env[62522]: DEBUG nova.compute.manager [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 720.653440] env[62522]: DEBUG oslo_concurrency.lockutils [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 28.023s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 720.653637] env[62522]: DEBUG nova.objects.instance [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62522) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 720.656635] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c62bc51a-5d43-4543-a8c0-5d23cd3d1beb tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Lock "cce5f0d4-364d-4295-a27d-44ca8585f803" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.583s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 720.681742] env[62522]: DEBUG nova.compute.manager [req-5cc37239-ca1a-4c9e-9658-41577f4b50dc req-3ee91659-bf3f-4191-8ad2-d6bd250cff31 service nova] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Detach interface failed, port_id=c470ec3e-5ac7-49ca-a9e0-f0a1943f0ec9, reason: Instance 253a2903-2601-4f0a-8882-e7510406f9d5 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 720.886388] env[62522]: INFO nova.compute.manager [-] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Took 1.57 seconds to deallocate network for instance. [ 721.021864] env[62522]: DEBUG oslo_vmware.api [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415204, 'name': ReconfigVM_Task, 'duration_secs': 0.282756} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.022208] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Reconfigured VM instance instance-0000001a to attach disk [datastore1] cd69a052-369b-4809-baf0-a1aec44f4ab5/cd69a052-369b-4809-baf0-a1aec44f4ab5.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 721.022835] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1056fb34-1536-4b7e-b02b-663c71327178 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.030550] env[62522]: DEBUG oslo_vmware.api [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 721.030550] env[62522]: value = "task-2415205" [ 721.030550] env[62522]: _type = "Task" [ 721.030550] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.039560] env[62522]: DEBUG oslo_vmware.api [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415205, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.104207] env[62522]: DEBUG nova.compute.manager [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 721.104445] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 721.105472] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c93866b3-155e-4476-88ac-f0bc8fcc36e6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.114401] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 721.114703] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6b004d42-ddf4-4834-81dd-f9bc40991594 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.158412] env[62522]: DEBUG nova.compute.utils [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 721.163384] env[62522]: DEBUG nova.compute.manager [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 721.163633] env[62522]: DEBUG nova.network.neutron [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 721.173937] env[62522]: DEBUG nova.compute.manager [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 721.181630] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 721.181854] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 721.182051] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Deleting the datastore file [datastore1] 17e1557d-e4cf-45b0-84da-4cbcffe31fb6 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 721.185693] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8186c789-4ff5-4dba-9eca-37d09837f883 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.189446] env[62522]: DEBUG oslo_vmware.api [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Waiting for the task: (returnval){ [ 721.189446] env[62522]: value = "task-2415207" [ 721.189446] env[62522]: _type = "Task" [ 721.189446] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.198855] env[62522]: DEBUG oslo_vmware.api [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2415207, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.287119] env[62522]: DEBUG nova.policy [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0f67866a87744599a2af43d9407bdbeb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c84fc4124fca4b12b4d3260601eeee83', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 721.393292] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.522085] env[62522]: DEBUG nova.network.neutron [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Successfully updated port: 284f1edc-1e35-4812-9696-c3ef34dfba09 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 721.542622] env[62522]: DEBUG oslo_vmware.api [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415205, 'name': Rename_Task, 'duration_secs': 0.142208} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.542785] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 721.543631] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3fa853e0-f415-4185-b47a-f708f9b1792b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.550174] env[62522]: DEBUG oslo_vmware.api [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 721.550174] env[62522]: value = "task-2415208" [ 721.550174] env[62522]: _type = "Task" [ 721.550174] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.558425] env[62522]: DEBUG oslo_vmware.api [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415208, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.667258] env[62522]: DEBUG nova.compute.manager [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 721.681153] env[62522]: DEBUG oslo_concurrency.lockutils [None req-62728d81-92bf-4f07-bdb0-287ad0d1b750 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.028s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.682218] env[62522]: DEBUG oslo_concurrency.lockutils [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.123s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.685345] env[62522]: INFO nova.compute.claims [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 721.705896] env[62522]: DEBUG oslo_vmware.api [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2415207, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142079} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.706083] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 721.707477] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 721.707797] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 721.708407] env[62522]: INFO nova.compute.manager [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 721.709096] env[62522]: DEBUG oslo.service.loopingcall [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 721.709457] env[62522]: DEBUG nova.compute.manager [-] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 721.709579] env[62522]: DEBUG nova.network.neutron [-] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 721.718656] env[62522]: DEBUG oslo_concurrency.lockutils [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.028056] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Acquiring lock "refresh_cache-8461f823-e48a-42f0-8863-44177565b82d" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 722.028056] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Acquired lock "refresh_cache-8461f823-e48a-42f0-8863-44177565b82d" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.028056] env[62522]: DEBUG nova.network.neutron [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 722.062251] env[62522]: DEBUG oslo_vmware.api [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415208, 'name': PowerOnVM_Task, 'duration_secs': 0.458953} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.062527] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 722.062730] env[62522]: INFO nova.compute.manager [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Took 7.00 seconds to spawn the instance on the hypervisor. 
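Editor's note: the PowerOnVM_Task sequence above (invoke, "Waiting for the task", "progress is 0%", then "completed successfully" with duration_secs) is the usual vCenter task lifecycle. The sketch below is a hand-rolled polling loop assuming a hypothetical get_task_info callable that returns the task state; the actual driver goes through oslo_vmware's session and wait_for_task machinery rather than a loop like this.

    import time

    class TaskTimeout(Exception):
        pass

    def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300):
        # get_task_info(task_id) is assumed to return dicts such as
        # {"state": "running", "progress": 40} or {"state": "success"}.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_id)
            state = info.get("state")
            if state == "success":
                return info
            if state == "error":
                raise RuntimeError("Task %s failed: %s" % (task_id, info.get("error")))
            # matches the "progress is N%" lines emitted while polling
            print("Task %s progress is %s%%" % (task_id, info.get("progress", 0)))
            time.sleep(interval)
        raise TaskTimeout("Task %s did not complete within %ss" % (task_id, timeout))

    # tiny demo with a fake task that finishes on the third poll:
    states = iter([{"state": "running", "progress": 0},
                   {"state": "running", "progress": 50},
                   {"state": "success"}])
    wait_for_task(lambda _tid: next(states), "task-2415208", interval=0)
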
[ 722.062907] env[62522]: DEBUG nova.compute.manager [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 722.064164] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df6cde79-5863-4dd5-8893-a1c051a325b0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.164682] env[62522]: DEBUG nova.network.neutron [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Successfully created port: 451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 722.590769] env[62522]: INFO nova.compute.manager [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Took 38.98 seconds to build instance. [ 722.593693] env[62522]: DEBUG nova.network.neutron [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 722.676792] env[62522]: DEBUG nova.compute.manager [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 722.713600] env[62522]: DEBUG nova.virt.hardware [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 722.713842] env[62522]: DEBUG nova.virt.hardware [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 722.714008] env[62522]: DEBUG nova.virt.hardware [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 722.714200] env[62522]: DEBUG nova.virt.hardware [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 722.714347] env[62522]: DEBUG nova.virt.hardware [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 722.714492] env[62522]: DEBUG nova.virt.hardware [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 722.714697] env[62522]: DEBUG nova.virt.hardware [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 722.714856] env[62522]: DEBUG nova.virt.hardware [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 722.716265] env[62522]: DEBUG nova.virt.hardware [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 722.716645] env[62522]: DEBUG nova.virt.hardware [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 722.716856] env[62522]: DEBUG nova.virt.hardware [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 722.717763] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c1005a7-6412-4137-8950-befccdea6190 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.728245] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cd8bfb7-4f80-412f-9ed3-9ab953828815 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.966245] env[62522]: DEBUG nova.network.neutron [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Updating instance_info_cache with network_info: [{"id": "284f1edc-1e35-4812-9696-c3ef34dfba09", "address": "fa:16:3e:f4:98:0f", "network": {"id": "b0b85995-9eb6-469b-ab9a-56c124355719", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-292084153-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36972e78027c4318ab5be02c371c4934", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap284f1edc-1e", "ovs_interfaceid": "284f1edc-1e35-4812-9696-c3ef34dfba09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.971185] env[62522]: DEBUG nova.network.neutron [-] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.985158] env[62522]: DEBUG nova.compute.manager 
[req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Received event network-changed-ca4e7776-76bf-40fc-ac2a-ac8917ca2978 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 722.985158] env[62522]: DEBUG nova.compute.manager [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Refreshing instance network info cache due to event network-changed-ca4e7776-76bf-40fc-ac2a-ac8917ca2978. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 722.985158] env[62522]: DEBUG oslo_concurrency.lockutils [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] Acquiring lock "refresh_cache-cce5f0d4-364d-4295-a27d-44ca8585f803" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 722.985158] env[62522]: DEBUG oslo_concurrency.lockutils [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] Acquired lock "refresh_cache-cce5f0d4-364d-4295-a27d-44ca8585f803" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.985571] env[62522]: DEBUG nova.network.neutron [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Refreshing network info cache for port ca4e7776-76bf-40fc-ac2a-ac8917ca2978 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 723.096160] env[62522]: DEBUG oslo_concurrency.lockutils [None req-152d05e1-9d2a-46cd-b1ee-e3492c3d7749 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "cd69a052-369b-4809-baf0-a1aec44f4ab5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.048s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 723.238928] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa6de46-64b0-429c-81d6-988bc878029c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.251166] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dabe7887-28be-4473-acdb-a291e3c94e47 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.285874] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6eb65cc-801c-4419-98cb-6a33f5777000 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.294032] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d0fdb4-9871-49b4-80c7-501d3977da9a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.308973] env[62522]: DEBUG nova.compute.provider_tree [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Inventory has not changed in ProviderTree for 
provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 723.470260] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Releasing lock "refresh_cache-8461f823-e48a-42f0-8863-44177565b82d" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 723.470646] env[62522]: DEBUG nova.compute.manager [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Instance network_info: |[{"id": "284f1edc-1e35-4812-9696-c3ef34dfba09", "address": "fa:16:3e:f4:98:0f", "network": {"id": "b0b85995-9eb6-469b-ab9a-56c124355719", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-292084153-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36972e78027c4318ab5be02c371c4934", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap284f1edc-1e", "ovs_interfaceid": "284f1edc-1e35-4812-9696-c3ef34dfba09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 723.471717] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:98:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '284f1edc-1e35-4812-9696-c3ef34dfba09', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 723.478952] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Creating folder: Project (36972e78027c4318ab5be02c371c4934). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 723.479762] env[62522]: INFO nova.compute.manager [-] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Took 1.77 seconds to deallocate network for instance. 
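Editor's note: the "Instance VIF info" entry above is derived field-by-field from the cached network_info logged just before it: the Neutron port id becomes iface_id, the nsx-logical-switch-id becomes an OpaqueNetwork reference, and the bridge name and MAC address carry over. A rough sketch of that mapping follows; vif_info_from_network_info is an illustrative helper, not the function Nova actually uses (the real translation lives in the vmwareapi driver).

    def vif_info_from_network_info(nw_info):
        # nw_info is a list of cached VIF entries shaped like the JSON in the log.
        vifs = []
        for vif in nw_info:
            details = vif.get("details", {})
            vifs.append({
                "network_name": vif["network"]["bridge"],       # e.g. "br-int"
                "mac_address": vif["address"],                  # e.g. "fa:16:3e:f4:98:0f"
                "network_ref": {
                    "type": "OpaqueNetwork",
                    "network-id": details.get("nsx-logical-switch-id"),
                    "network-type": "nsx.LogicalSwitch",
                    "use-external-id": True,
                },
                "iface_id": vif["id"],                          # the Neutron port UUID
                "vif_model": "vmxnet3",                         # per the image properties in the log
            })
        return vifs

Applied to the single-port cache entry logged above, this would produce a one-element VIF-info list like the one vmops reports before building the virtual machine.
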
[ 723.479762] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-169e8676-f9c8-4bf7-807c-3b86bd443aba {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.492155] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Created folder: Project (36972e78027c4318ab5be02c371c4934) in parent group-v489562. [ 723.492338] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Creating folder: Instances. Parent ref: group-v489637. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 723.492613] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12257bf8-5f27-45b5-96ab-bbbc29b537c0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.500984] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Created folder: Instances in parent group-v489637. [ 723.501237] env[62522]: DEBUG oslo.service.loopingcall [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 723.501424] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 723.501621] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-809e0365-f514-46d3-a882-95d25b0f7952 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.521487] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 723.521487] env[62522]: value = "task-2415211" [ 723.521487] env[62522]: _type = "Task" [ 723.521487] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.531957] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415211, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.599602] env[62522]: DEBUG nova.compute.manager [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 723.813700] env[62522]: DEBUG nova.scheduler.client.report [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 723.886437] env[62522]: DEBUG nova.network.neutron [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Updated VIF entry in instance network info cache for port ca4e7776-76bf-40fc-ac2a-ac8917ca2978. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 723.886835] env[62522]: DEBUG nova.network.neutron [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Updating instance_info_cache with network_info: [{"id": "ca4e7776-76bf-40fc-ac2a-ac8917ca2978", "address": "fa:16:3e:fa:a4:67", "network": {"id": "27951c52-e28e-4c94-968c-c1b5ddd6b58b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1545103257-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec421e0535f04c2ba17759e8342e1897", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca4e7776-76", "ovs_interfaceid": "ca4e7776-76bf-40fc-ac2a-ac8917ca2978", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.987486] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.032375] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415211, 'name': CreateVM_Task, 'duration_secs': 0.311698} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.032559] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 724.033282] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.033623] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.034106] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 724.034381] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84ac23eb-49e0-4501-9dbf-070f628d70d1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.039414] env[62522]: DEBUG oslo_vmware.api [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Waiting for the task: (returnval){ [ 724.039414] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526752fc-c5b9-273f-2f1a-43cf8b0c1991" [ 724.039414] env[62522]: _type = "Task" [ 724.039414] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.048982] env[62522]: DEBUG oslo_vmware.api [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526752fc-c5b9-273f-2f1a-43cf8b0c1991, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.124847] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.308104] env[62522]: INFO nova.compute.manager [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Rescuing [ 724.308338] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "refresh_cache-cd69a052-369b-4809-baf0-a1aec44f4ab5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.308490] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquired lock "refresh_cache-cd69a052-369b-4809-baf0-a1aec44f4ab5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.308700] env[62522]: DEBUG nova.network.neutron [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 724.320118] env[62522]: DEBUG oslo_concurrency.lockutils [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.637s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.320234] env[62522]: DEBUG nova.compute.manager [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 724.323396] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.510s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.323396] env[62522]: DEBUG nova.objects.instance [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lazy-loading 'resources' on Instance uuid 6d8b5429-113b-4280-9851-bf6614dde4a7 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 724.391998] env[62522]: DEBUG oslo_concurrency.lockutils [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] Releasing lock "refresh_cache-cce5f0d4-364d-4295-a27d-44ca8585f803" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.392285] env[62522]: DEBUG nova.compute.manager [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Received event network-vif-plugged-284f1edc-1e35-4812-9696-c3ef34dfba09 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 724.392477] env[62522]: DEBUG oslo_concurrency.lockutils [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] Acquiring lock "8461f823-e48a-42f0-8863-44177565b82d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.392674] env[62522]: DEBUG oslo_concurrency.lockutils [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] Lock "8461f823-e48a-42f0-8863-44177565b82d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.392882] env[62522]: DEBUG oslo_concurrency.lockutils [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] Lock "8461f823-e48a-42f0-8863-44177565b82d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.393077] env[62522]: DEBUG nova.compute.manager [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] [instance: 8461f823-e48a-42f0-8863-44177565b82d] No waiting events found dispatching network-vif-plugged-284f1edc-1e35-4812-9696-c3ef34dfba09 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 724.393248] env[62522]: WARNING nova.compute.manager [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Received unexpected event network-vif-plugged-284f1edc-1e35-4812-9696-c3ef34dfba09 for instance with vm_state building and task_state spawning. 
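Editor's note: the per-instance "-events" lock traffic and the "Received unexpected event network-vif-plugged-..." warning above reflect Nova's external-event handling: a spawning thread registers the events it expects for an instance, and the handler for Neutron notifications pops and signals the matching waiter, warning when nothing is registered. The toy sketch below illustrates that waiter table; the class and function names are illustrative, not Nova's actual code (which lives in nova.compute.manager).

    import threading
    from collections import defaultdict

    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()
            self._events = defaultdict(dict)  # instance_uuid -> {event_name: Event}

        def prepare_for_event(self, instance_uuid, event_name):
            # Called by the spawning thread before it starts waiting.
            with self._lock:
                ev = threading.Event()
                self._events[instance_uuid][event_name] = ev
                return ev

        def pop_instance_event(self, instance_uuid, event_name):
            with self._lock:
                return self._events[instance_uuid].pop(event_name, None)

    def handle_external_event(events, instance_uuid, event_name):
        waiter = events.pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            # mirrors the WARNING lines above for events nobody is waiting on
            print("WARNING: Received unexpected event %s for instance %s"
                  % (event_name, instance_uuid))
        else:
            waiter.set()  # wake the thread blocked waiting on this event

    events = InstanceEvents()
    handle_external_event(events, "ae3e55b8-00c1-4dae-9276-f46a1e17b80e",
                          "network-vif-plugged-451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71")
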
[ 724.393409] env[62522]: DEBUG nova.compute.manager [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Received event network-changed-284f1edc-1e35-4812-9696-c3ef34dfba09 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 724.393563] env[62522]: DEBUG nova.compute.manager [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Refreshing instance network info cache due to event network-changed-284f1edc-1e35-4812-9696-c3ef34dfba09. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 724.393751] env[62522]: DEBUG oslo_concurrency.lockutils [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] Acquiring lock "refresh_cache-8461f823-e48a-42f0-8863-44177565b82d" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.393887] env[62522]: DEBUG oslo_concurrency.lockutils [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] Acquired lock "refresh_cache-8461f823-e48a-42f0-8863-44177565b82d" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.394055] env[62522]: DEBUG nova.network.neutron [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Refreshing network info cache for port 284f1edc-1e35-4812-9696-c3ef34dfba09 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 724.530810] env[62522]: DEBUG nova.network.neutron [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Successfully updated port: 451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 724.551773] env[62522]: DEBUG oslo_vmware.api [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526752fc-c5b9-273f-2f1a-43cf8b0c1991, 'name': SearchDatastore_Task, 'duration_secs': 0.009291} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.552117] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.552556] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 724.552769] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.552927] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.553159] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 724.553758] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2380530b-f379-4a29-9618-24cbc3791e25 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.562575] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 724.562857] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 724.563486] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4939f6f-b491-4499-ae84-e5b5cb8ffb11 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.568915] env[62522]: DEBUG oslo_vmware.api [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Waiting for the task: (returnval){ [ 724.568915] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52037933-9a5b-25b8-733e-58e4aedf1905" [ 724.568915] env[62522]: _type = "Task" [ 724.568915] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.577038] env[62522]: DEBUG oslo_vmware.api [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52037933-9a5b-25b8-733e-58e4aedf1905, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.829389] env[62522]: DEBUG nova.compute.utils [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 724.830990] env[62522]: DEBUG nova.compute.manager [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 724.831175] env[62522]: DEBUG nova.network.neutron [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 724.984862] env[62522]: DEBUG nova.policy [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '32247919dd95421bae260e20ea166dc1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '500952665d634a76916f1998279db580', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 725.035442] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Acquiring lock "refresh_cache-ae3e55b8-00c1-4dae-9276-f46a1e17b80e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.035442] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Acquired lock "refresh_cache-ae3e55b8-00c1-4dae-9276-f46a1e17b80e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.035442] env[62522]: DEBUG nova.network.neutron [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 725.081735] env[62522]: DEBUG oslo_vmware.api [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52037933-9a5b-25b8-733e-58e4aedf1905, 'name': SearchDatastore_Task, 'duration_secs': 0.010104} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.082574] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddcd6971-a81b-44d2-bced-abd63090e859 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.090189] env[62522]: DEBUG oslo_vmware.api [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Waiting for the task: (returnval){ [ 725.090189] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e8d466-0ebc-d0ee-4b30-40650a3dff95" [ 725.090189] env[62522]: _type = "Task" [ 725.090189] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.098102] env[62522]: DEBUG oslo_vmware.api [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e8d466-0ebc-d0ee-4b30-40650a3dff95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.150131] env[62522]: DEBUG nova.network.neutron [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Updated VIF entry in instance network info cache for port 284f1edc-1e35-4812-9696-c3ef34dfba09. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 725.150131] env[62522]: DEBUG nova.network.neutron [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Updating instance_info_cache with network_info: [{"id": "284f1edc-1e35-4812-9696-c3ef34dfba09", "address": "fa:16:3e:f4:98:0f", "network": {"id": "b0b85995-9eb6-469b-ab9a-56c124355719", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-292084153-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36972e78027c4318ab5be02c371c4934", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap284f1edc-1e", "ovs_interfaceid": "284f1edc-1e35-4812-9696-c3ef34dfba09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.215654] env[62522]: DEBUG nova.compute.manager [req-2492166c-a9e9-41e2-9c5c-ba084a97c4ce req-e5913a89-8107-43e9-8d2f-37ba203edd8e service nova] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Received event network-vif-deleted-66de060c-1aa7-4119-b646-bd495f55add8 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 725.215654] env[62522]: DEBUG nova.compute.manager [req-2492166c-a9e9-41e2-9c5c-ba084a97c4ce req-e5913a89-8107-43e9-8d2f-37ba203edd8e service nova] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Received event network-vif-plugged-451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 725.215654] env[62522]: DEBUG oslo_concurrency.lockutils [req-2492166c-a9e9-41e2-9c5c-ba084a97c4ce req-e5913a89-8107-43e9-8d2f-37ba203edd8e service nova] Acquiring lock "ae3e55b8-00c1-4dae-9276-f46a1e17b80e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
725.215654] env[62522]: DEBUG oslo_concurrency.lockutils [req-2492166c-a9e9-41e2-9c5c-ba084a97c4ce req-e5913a89-8107-43e9-8d2f-37ba203edd8e service nova] Lock "ae3e55b8-00c1-4dae-9276-f46a1e17b80e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.215654] env[62522]: DEBUG oslo_concurrency.lockutils [req-2492166c-a9e9-41e2-9c5c-ba084a97c4ce req-e5913a89-8107-43e9-8d2f-37ba203edd8e service nova] Lock "ae3e55b8-00c1-4dae-9276-f46a1e17b80e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.215849] env[62522]: DEBUG nova.compute.manager [req-2492166c-a9e9-41e2-9c5c-ba084a97c4ce req-e5913a89-8107-43e9-8d2f-37ba203edd8e service nova] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] No waiting events found dispatching network-vif-plugged-451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 725.215849] env[62522]: WARNING nova.compute.manager [req-2492166c-a9e9-41e2-9c5c-ba084a97c4ce req-e5913a89-8107-43e9-8d2f-37ba203edd8e service nova] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Received unexpected event network-vif-plugged-451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71 for instance with vm_state building and task_state spawning. [ 725.215849] env[62522]: DEBUG nova.compute.manager [req-2492166c-a9e9-41e2-9c5c-ba084a97c4ce req-e5913a89-8107-43e9-8d2f-37ba203edd8e service nova] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Received event network-changed-451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 725.216153] env[62522]: DEBUG nova.compute.manager [req-2492166c-a9e9-41e2-9c5c-ba084a97c4ce req-e5913a89-8107-43e9-8d2f-37ba203edd8e service nova] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Refreshing instance network info cache due to event network-changed-451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 725.216153] env[62522]: DEBUG oslo_concurrency.lockutils [req-2492166c-a9e9-41e2-9c5c-ba084a97c4ce req-e5913a89-8107-43e9-8d2f-37ba203edd8e service nova] Acquiring lock "refresh_cache-ae3e55b8-00c1-4dae-9276-f46a1e17b80e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.333665] env[62522]: DEBUG nova.compute.manager [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 725.356802] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-458bfd4d-41e6-4dc0-803b-f9ce2f20983e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.366335] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-643f8061-8dbd-4eb2-8fdd-c7930fa0851a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.401263] env[62522]: DEBUG nova.network.neutron [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Updating instance_info_cache with network_info: [{"id": "6683db6f-edf5-4273-b92a-cb688e7baa82", "address": "fa:16:3e:96:b2:84", "network": {"id": "5f1d73d1-ff9e-4081-87cf-8df6294f67c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-892212702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "962664c996f24cf9ae192f79fae18ca4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "419a5b3f-4c6f-4168-9def-746b4d8c5c24", "external-id": "nsx-vlan-transportzone-656", "segmentation_id": 656, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6683db6f-ed", "ovs_interfaceid": "6683db6f-edf5-4273-b92a-cb688e7baa82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.406021] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed475e4-8dd6-491f-8b3b-d6f704471255 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.413298] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55889caf-0920-4200-a30d-f255f26acdce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.428801] env[62522]: DEBUG nova.compute.provider_tree [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 725.600424] env[62522]: DEBUG oslo_vmware.api [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e8d466-0ebc-d0ee-4b30-40650a3dff95, 'name': SearchDatastore_Task, 'duration_secs': 0.032278} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.600688] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.600951] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 8461f823-e48a-42f0-8863-44177565b82d/8461f823-e48a-42f0-8863-44177565b82d.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 725.601220] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee94d4e2-a6d8-4b44-81e7-7a6e8b726583 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.607158] env[62522]: DEBUG nova.network.neutron [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 725.610185] env[62522]: DEBUG oslo_vmware.api [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Waiting for the task: (returnval){ [ 725.610185] env[62522]: value = "task-2415212" [ 725.610185] env[62522]: _type = "Task" [ 725.610185] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.617656] env[62522]: DEBUG oslo_vmware.api [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Task: {'id': task-2415212, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.650474] env[62522]: DEBUG oslo_concurrency.lockutils [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] Releasing lock "refresh_cache-8461f823-e48a-42f0-8863-44177565b82d" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.650741] env[62522]: DEBUG nova.compute.manager [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Received event network-changed-ca4e7776-76bf-40fc-ac2a-ac8917ca2978 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 725.650968] env[62522]: DEBUG nova.compute.manager [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Refreshing instance network info cache due to event network-changed-ca4e7776-76bf-40fc-ac2a-ac8917ca2978. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 725.651199] env[62522]: DEBUG oslo_concurrency.lockutils [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] Acquiring lock "refresh_cache-cce5f0d4-364d-4295-a27d-44ca8585f803" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.651349] env[62522]: DEBUG oslo_concurrency.lockutils [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] Acquired lock "refresh_cache-cce5f0d4-364d-4295-a27d-44ca8585f803" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.651515] env[62522]: DEBUG nova.network.neutron [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Refreshing network info cache for port ca4e7776-76bf-40fc-ac2a-ac8917ca2978 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 725.695313] env[62522]: DEBUG nova.network.neutron [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Successfully created port: 6fc5a3d3-c159-4c8e-978d-723aeeb43441 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 725.908275] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Releasing lock "refresh_cache-cd69a052-369b-4809-baf0-a1aec44f4ab5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.931373] env[62522]: DEBUG nova.scheduler.client.report [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 725.984106] env[62522]: DEBUG nova.network.neutron [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Updating instance_info_cache with network_info: [{"id": "451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71", "address": "fa:16:3e:72:b3:7f", "network": {"id": "67803a10-3d20-412d-8239-f15c6c3e90db", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1068967527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"c84fc4124fca4b12b4d3260601eeee83", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b49c5024-2ced-42ca-90cc-6066766d43e6", "external-id": "nsx-vlan-transportzone-239", "segmentation_id": 239, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap451cd3c2-ee", "ovs_interfaceid": "451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.121492] env[62522]: DEBUG oslo_vmware.api [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Task: {'id': task-2415212, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.345342] env[62522]: DEBUG nova.compute.manager [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 726.374330] env[62522]: DEBUG nova.virt.hardware [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 726.374630] env[62522]: DEBUG nova.virt.hardware [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 726.374796] env[62522]: DEBUG nova.virt.hardware [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 726.374979] env[62522]: DEBUG nova.virt.hardware [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 726.375188] env[62522]: DEBUG nova.virt.hardware [None 
req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 726.375377] env[62522]: DEBUG nova.virt.hardware [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 726.375620] env[62522]: DEBUG nova.virt.hardware [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 726.375814] env[62522]: DEBUG nova.virt.hardware [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 726.376812] env[62522]: DEBUG nova.virt.hardware [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 726.377038] env[62522]: DEBUG nova.virt.hardware [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 726.377229] env[62522]: DEBUG nova.virt.hardware [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 726.378087] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a687c322-406a-4326-857c-8e3f4609810f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.390402] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b7a593-6091-404c-a89a-ee90ac633452 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.436827] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.114s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.439498] env[62522]: DEBUG oslo_concurrency.lockutils [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 
tempest-MultipleCreateTestJSON-866058691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.078s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.440757] env[62522]: DEBUG nova.objects.instance [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lazy-loading 'resources' on Instance uuid 68b4c229-0ace-486f-9a99-d3c955b7bdfb {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 726.468968] env[62522]: INFO nova.scheduler.client.report [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Deleted allocations for instance 6d8b5429-113b-4280-9851-bf6614dde4a7 [ 726.488043] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Releasing lock "refresh_cache-ae3e55b8-00c1-4dae-9276-f46a1e17b80e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.489968] env[62522]: DEBUG nova.compute.manager [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Instance network_info: |[{"id": "451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71", "address": "fa:16:3e:72:b3:7f", "network": {"id": "67803a10-3d20-412d-8239-f15c6c3e90db", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1068967527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c84fc4124fca4b12b4d3260601eeee83", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b49c5024-2ced-42ca-90cc-6066766d43e6", "external-id": "nsx-vlan-transportzone-239", "segmentation_id": 239, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap451cd3c2-ee", "ovs_interfaceid": "451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 726.492071] env[62522]: DEBUG oslo_concurrency.lockutils [req-2492166c-a9e9-41e2-9c5c-ba084a97c4ce req-e5913a89-8107-43e9-8d2f-37ba203edd8e service nova] Acquired lock "refresh_cache-ae3e55b8-00c1-4dae-9276-f46a1e17b80e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.492071] env[62522]: DEBUG nova.network.neutron [req-2492166c-a9e9-41e2-9c5c-ba084a97c4ce req-e5913a89-8107-43e9-8d2f-37ba203edd8e service nova] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Refreshing network info cache for port 451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 726.492071] 
env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:72:b3:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b49c5024-2ced-42ca-90cc-6066766d43e6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 726.501033] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Creating folder: Project (c84fc4124fca4b12b4d3260601eeee83). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 726.503146] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-034d71d0-55be-435c-b6fa-91728144c007 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.512254] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Created folder: Project (c84fc4124fca4b12b4d3260601eeee83) in parent group-v489562. [ 726.512451] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Creating folder: Instances. Parent ref: group-v489640. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 726.512681] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6b3a7b71-de43-4800-9c0e-149de8d15d5d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.521846] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Created folder: Instances in parent group-v489640. [ 726.522096] env[62522]: DEBUG oslo.service.loopingcall [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 726.522285] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 726.522485] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4bc75129-67ee-4b49-985b-0e51cee40ca7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.542588] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 726.542588] env[62522]: value = "task-2415215" [ 726.542588] env[62522]: _type = "Task" [ 726.542588] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.550059] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415215, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.621395] env[62522]: DEBUG oslo_vmware.api [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Task: {'id': task-2415212, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.780998} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.621751] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 8461f823-e48a-42f0-8863-44177565b82d/8461f823-e48a-42f0-8863-44177565b82d.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 726.622031] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 726.622345] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-48d302bb-694c-49bc-aebc-8387be4a1e6c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.628346] env[62522]: DEBUG oslo_vmware.api [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Waiting for the task: (returnval){ [ 726.628346] env[62522]: value = "task-2415216" [ 726.628346] env[62522]: _type = "Task" [ 726.628346] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.636219] env[62522]: DEBUG oslo_vmware.api [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Task: {'id': task-2415216, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.789512] env[62522]: DEBUG nova.network.neutron [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Updated VIF entry in instance network info cache for port ca4e7776-76bf-40fc-ac2a-ac8917ca2978. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 726.791175] env[62522]: DEBUG nova.network.neutron [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Updating instance_info_cache with network_info: [{"id": "ca4e7776-76bf-40fc-ac2a-ac8917ca2978", "address": "fa:16:3e:fa:a4:67", "network": {"id": "27951c52-e28e-4c94-968c-c1b5ddd6b58b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1545103257-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec421e0535f04c2ba17759e8342e1897", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca4e7776-76", "ovs_interfaceid": "ca4e7776-76bf-40fc-ac2a-ac8917ca2978", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.976867] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e50ca4d0-f51c-4c01-ab0b-bbdc008d871c tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "6d8b5429-113b-4280-9851-bf6614dde4a7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.619s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.056128] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415215, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.141119] env[62522]: DEBUG oslo_vmware.api [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Task: {'id': task-2415216, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070779} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.141418] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 727.144208] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-232d7ac6-ac2e-4942-980d-0f9c39b0d628 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.171044] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Reconfiguring VM instance instance-0000001b to attach disk [datastore2] 8461f823-e48a-42f0-8863-44177565b82d/8461f823-e48a-42f0-8863-44177565b82d.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 727.176401] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5d3441b-37d7-4ae9-812c-e374581ad91b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.196741] env[62522]: DEBUG oslo_vmware.api [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Waiting for the task: (returnval){ [ 727.196741] env[62522]: value = "task-2415217" [ 727.196741] env[62522]: _type = "Task" [ 727.196741] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.208699] env[62522]: DEBUG oslo_vmware.api [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Task: {'id': task-2415217, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.296290] env[62522]: DEBUG oslo_concurrency.lockutils [req-8917dee6-51c5-4ad2-8806-396a9e06aad8 req-e917f6be-6478-4852-8a48-7ae769a2adee service nova] Releasing lock "refresh_cache-cce5f0d4-364d-4295-a27d-44ca8585f803" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.402694] env[62522]: DEBUG nova.network.neutron [req-2492166c-a9e9-41e2-9c5c-ba084a97c4ce req-e5913a89-8107-43e9-8d2f-37ba203edd8e service nova] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Updated VIF entry in instance network info cache for port 451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 727.402694] env[62522]: DEBUG nova.network.neutron [req-2492166c-a9e9-41e2-9c5c-ba084a97c4ce req-e5913a89-8107-43e9-8d2f-37ba203edd8e service nova] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Updating instance_info_cache with network_info: [{"id": "451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71", "address": "fa:16:3e:72:b3:7f", "network": {"id": "67803a10-3d20-412d-8239-f15c6c3e90db", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1068967527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c84fc4124fca4b12b4d3260601eeee83", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b49c5024-2ced-42ca-90cc-6066766d43e6", "external-id": "nsx-vlan-transportzone-239", "segmentation_id": 239, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap451cd3c2-ee", "ovs_interfaceid": "451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.456746] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 727.457487] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-72ced5af-b7e6-47a4-bb14-3d32382540d9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.464769] env[62522]: DEBUG oslo_vmware.api [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 727.464769] env[62522]: value = "task-2415218" [ 727.464769] env[62522]: _type = "Task" [ 727.464769] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.470409] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc9ed48-e1d8-45df-b0bc-3d962f3115a1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.478367] env[62522]: DEBUG oslo_vmware.api [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415218, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.481110] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f952b9c-1a23-4365-ad18-763ec5f379d1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.514327] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f31e06f-1d28-443f-80df-6437851adc57 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.522780] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93712f33-d09f-467a-bb4c-433b2459d085 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.538580] env[62522]: DEBUG nova.compute.provider_tree [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 727.554140] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415215, 'name': CreateVM_Task, 'duration_secs': 0.562813} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.554335] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 727.554989] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.555176] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.555498] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 727.555739] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0efee6c1-9c9b-4f36-a9b9-508e9ad51125 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.560832] env[62522]: DEBUG oslo_vmware.api [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Waiting for the task: (returnval){ [ 727.560832] 
env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5239a46b-697a-3241-018f-3e78a225b72e" [ 727.560832] env[62522]: _type = "Task" [ 727.560832] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.568812] env[62522]: DEBUG oslo_vmware.api [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5239a46b-697a-3241-018f-3e78a225b72e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.706628] env[62522]: DEBUG oslo_vmware.api [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Task: {'id': task-2415217, 'name': ReconfigVM_Task, 'duration_secs': 0.293685} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.706944] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Reconfigured VM instance instance-0000001b to attach disk [datastore2] 8461f823-e48a-42f0-8863-44177565b82d/8461f823-e48a-42f0-8863-44177565b82d.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 727.707691] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-02cb27cf-57ff-4839-b806-263a81b7871b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.709651] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 727.709767] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 727.715429] env[62522]: DEBUG oslo_vmware.api [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Waiting for the task: (returnval){ [ 727.715429] env[62522]: value = "task-2415219" [ 727.715429] env[62522]: _type = "Task" [ 727.715429] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.723498] env[62522]: DEBUG oslo_vmware.api [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Task: {'id': task-2415219, 'name': Rename_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.787798] env[62522]: DEBUG nova.compute.manager [req-1cd9325c-53bb-4d7e-bbcc-fcacdf51e8d4 req-d9c33fa0-07e9-4c8b-acdd-418bbec24148 service nova] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Received event network-vif-plugged-6fc5a3d3-c159-4c8e-978d-723aeeb43441 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 727.788039] env[62522]: DEBUG oslo_concurrency.lockutils [req-1cd9325c-53bb-4d7e-bbcc-fcacdf51e8d4 req-d9c33fa0-07e9-4c8b-acdd-418bbec24148 service nova] Acquiring lock "87a90c88-6e0a-4051-8978-b2f9c5a876ca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.788266] env[62522]: DEBUG oslo_concurrency.lockutils [req-1cd9325c-53bb-4d7e-bbcc-fcacdf51e8d4 req-d9c33fa0-07e9-4c8b-acdd-418bbec24148 service nova] Lock "87a90c88-6e0a-4051-8978-b2f9c5a876ca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.788440] env[62522]: DEBUG oslo_concurrency.lockutils [req-1cd9325c-53bb-4d7e-bbcc-fcacdf51e8d4 req-d9c33fa0-07e9-4c8b-acdd-418bbec24148 service nova] Lock "87a90c88-6e0a-4051-8978-b2f9c5a876ca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.788655] env[62522]: DEBUG nova.compute.manager [req-1cd9325c-53bb-4d7e-bbcc-fcacdf51e8d4 req-d9c33fa0-07e9-4c8b-acdd-418bbec24148 service nova] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] No waiting events found dispatching network-vif-plugged-6fc5a3d3-c159-4c8e-978d-723aeeb43441 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 727.788824] env[62522]: WARNING nova.compute.manager [req-1cd9325c-53bb-4d7e-bbcc-fcacdf51e8d4 req-d9c33fa0-07e9-4c8b-acdd-418bbec24148 service nova] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Received unexpected event network-vif-plugged-6fc5a3d3-c159-4c8e-978d-723aeeb43441 for instance with vm_state building and task_state spawning. [ 727.845521] env[62522]: DEBUG nova.network.neutron [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Successfully updated port: 6fc5a3d3-c159-4c8e-978d-723aeeb43441 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 727.906819] env[62522]: DEBUG oslo_concurrency.lockutils [req-2492166c-a9e9-41e2-9c5c-ba084a97c4ce req-e5913a89-8107-43e9-8d2f-37ba203edd8e service nova] Releasing lock "refresh_cache-ae3e55b8-00c1-4dae-9276-f46a1e17b80e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.975591] env[62522]: DEBUG oslo_vmware.api [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415218, 'name': PowerOffVM_Task, 'duration_secs': 0.206716} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.975867] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 727.977091] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1cb647d-4933-4603-92b4-19eccc31f620 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.997849] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e5acaa4-1b59-4041-afd0-6f14ff259d25 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.027788] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 728.028087] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4df68c7e-4fb9-4c8b-9ef7-1825d90cac0c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.034049] env[62522]: DEBUG oslo_vmware.api [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 728.034049] env[62522]: value = "task-2415220" [ 728.034049] env[62522]: _type = "Task" [ 728.034049] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.042764] env[62522]: DEBUG nova.scheduler.client.report [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 728.047290] env[62522]: DEBUG oslo_vmware.api [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415220, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.070015] env[62522]: DEBUG oslo_vmware.api [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5239a46b-697a-3241-018f-3e78a225b72e, 'name': SearchDatastore_Task, 'duration_secs': 0.013569} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.070403] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.070635] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 728.070860] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.071015] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.071200] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 728.071842] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b5860e8-7c78-4d7d-9fa6-e46d75a7a6c3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.079623] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 728.079801] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Folder [datastore2] devstack-image-cache_base 
created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 728.080682] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7545724-4856-4575-bb96-78ec596a7c2e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.085812] env[62522]: DEBUG oslo_vmware.api [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Waiting for the task: (returnval){ [ 728.085812] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5224cf0b-5acd-56a8-f879-33014cbc3835" [ 728.085812] env[62522]: _type = "Task" [ 728.085812] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.093363] env[62522]: DEBUG oslo_vmware.api [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5224cf0b-5acd-56a8-f879-33014cbc3835, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.222656] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 728.222656] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Starting heal instance info cache {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 728.222656] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Rebuilding the list of instances to heal {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 728.228977] env[62522]: DEBUG oslo_vmware.api [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Task: {'id': task-2415219, 'name': Rename_Task, 'duration_secs': 0.142135} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.229240] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 728.229558] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6fca692f-75ee-4a73-b646-dae18688cf4f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.235587] env[62522]: DEBUG oslo_vmware.api [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Waiting for the task: (returnval){ [ 728.235587] env[62522]: value = "task-2415221" [ 728.235587] env[62522]: _type = "Task" [ 728.235587] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.243649] env[62522]: DEBUG oslo_vmware.api [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Task: {'id': task-2415221, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.348426] env[62522]: DEBUG oslo_concurrency.lockutils [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Acquiring lock "refresh_cache-87a90c88-6e0a-4051-8978-b2f9c5a876ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.348631] env[62522]: DEBUG oslo_concurrency.lockutils [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Acquired lock "refresh_cache-87a90c88-6e0a-4051-8978-b2f9c5a876ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.348833] env[62522]: DEBUG nova.network.neutron [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 728.545386] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] VM already powered off {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 728.550178] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 728.550178] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.550178] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.550178] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Creating directory with 
path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 728.550178] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b4b573fa-a8c9-47ec-bd72-1997bf187cca {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.550426] env[62522]: DEBUG oslo_concurrency.lockutils [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.109s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.550696] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.045s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 728.550939] env[62522]: DEBUG nova.objects.instance [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Lazy-loading 'resources' on Instance uuid c73686c6-4dd8-4f00-a65a-5d8574409ad1 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 728.559255] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 728.559741] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 728.560793] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77f6962b-20b1-4de2-b6b3-69375e58c0e5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.567126] env[62522]: DEBUG oslo_vmware.api [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 728.567126] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5222791d-7233-fb1c-9467-e3b8bfb6e247" [ 728.567126] env[62522]: _type = "Task" [ 728.567126] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.575269] env[62522]: DEBUG oslo_vmware.api [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5222791d-7233-fb1c-9467-e3b8bfb6e247, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.579116] env[62522]: INFO nova.scheduler.client.report [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Deleted allocations for instance 68b4c229-0ace-486f-9a99-d3c955b7bdfb [ 728.595944] env[62522]: DEBUG oslo_vmware.api [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5224cf0b-5acd-56a8-f879-33014cbc3835, 'name': SearchDatastore_Task, 'duration_secs': 0.008495} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.596798] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdea0b39-82ee-4652-a1d7-4ff94e108285 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.602698] env[62522]: DEBUG oslo_vmware.api [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Waiting for the task: (returnval){ [ 728.602698] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52592f8d-363e-c288-9afc-3abc91d7538d" [ 728.602698] env[62522]: _type = "Task" [ 728.602698] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.611354] env[62522]: DEBUG oslo_vmware.api [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52592f8d-363e-c288-9afc-3abc91d7538d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.728257] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Skipping network cache update for instance because it is Building. {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 728.728354] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Skipping network cache update for instance because it is Building. {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 728.728442] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Skipping network cache update for instance because it is Building. {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 728.744993] env[62522]: DEBUG oslo_vmware.api [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Task: {'id': task-2415221, 'name': PowerOnVM_Task, 'duration_secs': 0.448919} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.745349] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 728.745550] env[62522]: INFO nova.compute.manager [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Took 8.78 seconds to spawn the instance on the hypervisor. [ 728.745731] env[62522]: DEBUG nova.compute.manager [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 728.746535] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e072f0c5-b610-422d-8849-230f1b0533c9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.783453] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "refresh_cache-3824a70e-8498-410a-904d-c7cd0de0c358" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.783596] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquired lock "refresh_cache-3824a70e-8498-410a-904d-c7cd0de0c358" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.783742] env[62522]: DEBUG nova.network.neutron [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Forcefully refreshing network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 728.783891] env[62522]: DEBUG nova.objects.instance [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lazy-loading 'info_cache' on Instance uuid 3824a70e-8498-410a-904d-c7cd0de0c358 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 728.902255] env[62522]: DEBUG nova.network.neutron [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 729.093328] env[62522]: DEBUG oslo_vmware.api [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5222791d-7233-fb1c-9467-e3b8bfb6e247, 'name': SearchDatastore_Task, 'duration_secs': 0.009556} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.093809] env[62522]: DEBUG oslo_concurrency.lockutils [None req-01fbc155-9d30-491b-9078-c9c04a840a6b tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "68b4c229-0ace-486f-9a99-d3c955b7bdfb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.108s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.101699] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8576534c-45e6-4790-abfe-cbb0b3543954 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.103795] env[62522]: DEBUG oslo_vmware.api [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 729.103795] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f2102c-81ad-0807-f8a6-914829d7870b" [ 729.103795] env[62522]: _type = "Task" [ 729.103795] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.115663] env[62522]: DEBUG oslo_vmware.api [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52592f8d-363e-c288-9afc-3abc91d7538d, 'name': SearchDatastore_Task, 'duration_secs': 0.008597} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.122747] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.123012] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] ae3e55b8-00c1-4dae-9276-f46a1e17b80e/ae3e55b8-00c1-4dae-9276-f46a1e17b80e.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 729.123301] env[62522]: DEBUG oslo_vmware.api [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f2102c-81ad-0807-f8a6-914829d7870b, 'name': SearchDatastore_Task, 'duration_secs': 0.00941} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.124395] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-57c1a1c5-04c2-4904-87c8-a7e57dc81c7d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.126808] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.127057] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] cd69a052-369b-4809-baf0-a1aec44f4ab5/2ee4561b-ba48-4f45-82f6-eac89be98290-rescue.vmdk. {{(pid=62522) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 729.127565] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-49048a43-7ab1-4948-9949-4d89b4c92aef {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.135035] env[62522]: DEBUG oslo_vmware.api [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Waiting for the task: (returnval){ [ 729.135035] env[62522]: value = "task-2415222" [ 729.135035] env[62522]: _type = "Task" [ 729.135035] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.135592] env[62522]: DEBUG oslo_vmware.api [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 729.135592] env[62522]: value = "task-2415223" [ 729.135592] env[62522]: _type = "Task" [ 729.135592] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.150200] env[62522]: DEBUG nova.network.neutron [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Updating instance_info_cache with network_info: [{"id": "6fc5a3d3-c159-4c8e-978d-723aeeb43441", "address": "fa:16:3e:43:6f:27", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fc5a3d3-c1", "ovs_interfaceid": "6fc5a3d3-c159-4c8e-978d-723aeeb43441", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.162362] env[62522]: DEBUG oslo_vmware.api [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Task: {'id': task-2415222, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.165848] env[62522]: DEBUG oslo_vmware.api [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415223, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.267211] env[62522]: INFO nova.compute.manager [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Took 40.14 seconds to build instance. 
[ 729.591770] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-818c1a53-aa6c-4679-b32a-603a2b3dd074 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.601435] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9214e37f-c72c-4eaa-a675-34a57c17729e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.644259] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26fbe0da-746f-4b3c-b06e-214472b7901a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.667519] env[62522]: DEBUG oslo_concurrency.lockutils [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquiring lock "76cb551e-e605-4c80-a6ef-e36681fc0bc2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.667946] env[62522]: DEBUG oslo_concurrency.lockutils [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Lock "76cb551e-e605-4c80-a6ef-e36681fc0bc2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.668286] env[62522]: DEBUG oslo_vmware.api [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Task: {'id': task-2415222, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.521025} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.668767] env[62522]: DEBUG oslo_concurrency.lockutils [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Releasing lock "refresh_cache-87a90c88-6e0a-4051-8978-b2f9c5a876ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.669135] env[62522]: DEBUG nova.compute.manager [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Instance network_info: |[{"id": "6fc5a3d3-c159-4c8e-978d-723aeeb43441", "address": "fa:16:3e:43:6f:27", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fc5a3d3-c1", "ovs_interfaceid": "6fc5a3d3-c159-4c8e-978d-723aeeb43441", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 729.669437] env[62522]: DEBUG oslo_vmware.api [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415223, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.670229] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] ae3e55b8-00c1-4dae-9276-f46a1e17b80e/ae3e55b8-00c1-4dae-9276-f46a1e17b80e.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 729.670229] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 729.670875] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:6f:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f6d1427-d86b-4371-9172-50e4bb0eb1cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6fc5a3d3-c159-4c8e-978d-723aeeb43441', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 729.680634] env[62522]: DEBUG oslo.service.loopingcall [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 729.684399] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0cdc57-e9ef-4428-979a-8464569efe37 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.687821] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-515dec9b-5802-4929-bfe4-8c819b522f5a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.690104] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 729.690375] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1dbe1bb1-af45-4169-b33d-5e0a360daf19 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.716647] env[62522]: DEBUG nova.compute.provider_tree [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 729.719687] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 729.719687] env[62522]: value = "task-2415225" [ 729.719687] env[62522]: _type = "Task" [ 729.719687] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.719987] env[62522]: DEBUG oslo_vmware.api [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Waiting for the task: (returnval){ [ 729.719987] env[62522]: value = "task-2415224" [ 729.719987] env[62522]: _type = "Task" [ 729.719987] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.734610] env[62522]: DEBUG oslo_vmware.api [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Task: {'id': task-2415224, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.734802] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415225, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.768887] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1ada2c2d-f6df-446b-837c-38ee7ab2b9f3 tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Lock "8461f823-e48a-42f0-8863-44177565b82d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.626s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.855289] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Acquiring lock "8461f823-e48a-42f0-8863-44177565b82d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.855556] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Lock "8461f823-e48a-42f0-8863-44177565b82d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.855915] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Acquiring lock "8461f823-e48a-42f0-8863-44177565b82d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.856164] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Lock "8461f823-e48a-42f0-8863-44177565b82d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.856347] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Lock "8461f823-e48a-42f0-8863-44177565b82d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.858849] env[62522]: INFO nova.compute.manager [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Terminating instance [ 730.137693] env[62522]: DEBUG nova.compute.manager [req-6ab0ca19-348f-46d5-97c1-82bbe939e57f req-f73a3ce2-681f-491c-afbb-00252b26c5d8 service nova] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Received event network-changed-6fc5a3d3-c159-4c8e-978d-723aeeb43441 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 730.137693] env[62522]: DEBUG nova.compute.manager 
[req-6ab0ca19-348f-46d5-97c1-82bbe939e57f req-f73a3ce2-681f-491c-afbb-00252b26c5d8 service nova] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Refreshing instance network info cache due to event network-changed-6fc5a3d3-c159-4c8e-978d-723aeeb43441. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 730.137693] env[62522]: DEBUG oslo_concurrency.lockutils [req-6ab0ca19-348f-46d5-97c1-82bbe939e57f req-f73a3ce2-681f-491c-afbb-00252b26c5d8 service nova] Acquiring lock "refresh_cache-87a90c88-6e0a-4051-8978-b2f9c5a876ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 730.137693] env[62522]: DEBUG oslo_concurrency.lockutils [req-6ab0ca19-348f-46d5-97c1-82bbe939e57f req-f73a3ce2-681f-491c-afbb-00252b26c5d8 service nova] Acquired lock "refresh_cache-87a90c88-6e0a-4051-8978-b2f9c5a876ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.138564] env[62522]: DEBUG nova.network.neutron [req-6ab0ca19-348f-46d5-97c1-82bbe939e57f req-f73a3ce2-681f-491c-afbb-00252b26c5d8 service nova] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Refreshing network info cache for port 6fc5a3d3-c159-4c8e-978d-723aeeb43441 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 730.159119] env[62522]: DEBUG oslo_vmware.api [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415223, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.57197} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.159119] env[62522]: INFO nova.virt.vmwareapi.ds_util [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] cd69a052-369b-4809-baf0-a1aec44f4ab5/2ee4561b-ba48-4f45-82f6-eac89be98290-rescue.vmdk. 
[ 730.159743] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-604a5aeb-9518-4b08-844d-96453145063e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.188447] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] cd69a052-369b-4809-baf0-a1aec44f4ab5/2ee4561b-ba48-4f45-82f6-eac89be98290-rescue.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 730.189079] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8ee842c-bd91-4ed8-904c-d3a0f88ec7ff {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.211510] env[62522]: DEBUG oslo_vmware.api [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 730.211510] env[62522]: value = "task-2415226" [ 730.211510] env[62522]: _type = "Task" [ 730.211510] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.220163] env[62522]: DEBUG oslo_vmware.api [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415226, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.227173] env[62522]: DEBUG nova.scheduler.client.report [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 730.236290] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415225, 'name': CreateVM_Task} progress is 25%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.236595] env[62522]: DEBUG oslo_vmware.api [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Task: {'id': task-2415224, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058261} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.237153] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 730.237950] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3042e10d-bc49-4fe3-b78f-43209be5952c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.263632] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Reconfiguring VM instance instance-0000001c to attach disk [datastore2] ae3e55b8-00c1-4dae-9276-f46a1e17b80e/ae3e55b8-00c1-4dae-9276-f46a1e17b80e.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 730.264858] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0605453b-7614-410a-bc47-1763358029fa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.283261] env[62522]: DEBUG nova.compute.manager [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 730.294747] env[62522]: DEBUG oslo_vmware.api [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Waiting for the task: (returnval){ [ 730.294747] env[62522]: value = "task-2415227" [ 730.294747] env[62522]: _type = "Task" [ 730.294747] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.305116] env[62522]: DEBUG oslo_vmware.api [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Task: {'id': task-2415227, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.365799] env[62522]: DEBUG nova.compute.manager [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 730.366103] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 730.367077] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2dfee64-45c6-45ab-8b98-cc2179d75bf2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.382010] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 730.382355] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-218f3a50-25e8-4fd9-82a0-f9d1797884e4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.391088] env[62522]: DEBUG oslo_vmware.api [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Waiting for the task: (returnval){ [ 730.391088] env[62522]: value = "task-2415228" [ 730.391088] env[62522]: _type = "Task" [ 730.391088] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.399977] env[62522]: DEBUG oslo_vmware.api [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Task: {'id': task-2415228, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.434912] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "5ed51dce-2a56-4389-acf8-280bd93ff5f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.435218] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "5ed51dce-2a56-4389-acf8-280bd93ff5f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.461056] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "74e663b1-b552-4b71-aa74-308e908d79e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.461312] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "74e663b1-b552-4b71-aa74-308e908d79e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.724336] env[62522]: DEBUG oslo_vmware.api [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415226, 'name': ReconfigVM_Task, 'duration_secs': 0.29984} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.726996] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Reconfigured VM instance instance-0000001a to attach disk [datastore1] cd69a052-369b-4809-baf0-a1aec44f4ab5/2ee4561b-ba48-4f45-82f6-eac89be98290-rescue.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 730.727819] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef91f09-6d9f-459f-84d1-82c6e06e83f8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.735886] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415225, 'name': CreateVM_Task, 'duration_secs': 0.768716} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.751348] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 730.752103] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.201s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.759808] env[62522]: DEBUG oslo_concurrency.lockutils [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 730.759808] env[62522]: DEBUG oslo_concurrency.lockutils [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.763718] env[62522]: DEBUG oslo_concurrency.lockutils [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 730.763718] env[62522]: DEBUG oslo_concurrency.lockutils [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.443s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.763718] env[62522]: INFO nova.compute.claims [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 730.764507] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8cf77f7-0183-4cc0-bf44-09068a5ff3b2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.775774] env[62522]: DEBUG nova.network.neutron [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Updating instance_info_cache with network_info: [{"id": "a0e9b152-7b65-405a-8302-dc8561d06224", "address": "fa:16:3e:e0:65:aa", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.233.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0e9b152-7b", "ovs_interfaceid": "a0e9b152-7b65-405a-8302-dc8561d06224", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.776526] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc493173-55c2-486a-a217-e8494df2b086 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.781449] env[62522]: DEBUG oslo_vmware.api [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Waiting for the task: (returnval){ [ 730.781449] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]528d3855-0e76-d5ad-fe48-436e713bb8cb" [ 730.781449] env[62522]: _type = "Task" [ 730.781449] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.785983] env[62522]: DEBUG oslo_vmware.api [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 730.785983] env[62522]: value = "task-2415229" [ 730.785983] env[62522]: _type = "Task" [ 730.785983] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.794495] env[62522]: DEBUG oslo_vmware.api [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]528d3855-0e76-d5ad-fe48-436e713bb8cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.798503] env[62522]: INFO nova.scheduler.client.report [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Deleted allocations for instance c73686c6-4dd8-4f00-a65a-5d8574409ad1 [ 730.811299] env[62522]: DEBUG oslo_vmware.api [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415229, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.817367] env[62522]: DEBUG oslo_vmware.api [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Task: {'id': task-2415227, 'name': ReconfigVM_Task, 'duration_secs': 0.331091} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.817650] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Reconfigured VM instance instance-0000001c to attach disk [datastore2] ae3e55b8-00c1-4dae-9276-f46a1e17b80e/ae3e55b8-00c1-4dae-9276-f46a1e17b80e.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 730.818542] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3701154d-14da-4c7b-9a83-0f986c052b41 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.823712] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.825437] env[62522]: DEBUG oslo_vmware.api [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Waiting for the task: (returnval){ [ 730.825437] env[62522]: value = "task-2415230" [ 730.825437] env[62522]: _type = "Task" [ 730.825437] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.833819] env[62522]: DEBUG oslo_vmware.api [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Task: {'id': task-2415230, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.900659] env[62522]: DEBUG oslo_vmware.api [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Task: {'id': task-2415228, 'name': PowerOffVM_Task, 'duration_secs': 0.197879} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.900981] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 730.901201] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 730.901483] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a47ef4f-0800-4185-a94c-ea9e4bef6838 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.961837] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 730.962104] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 730.962306] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Deleting the datastore file [datastore2] 8461f823-e48a-42f0-8863-44177565b82d {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 730.962580] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7771c221-defa-49ab-bafd-9db526d6bbda {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.968890] env[62522]: DEBUG oslo_vmware.api [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Waiting for the task: (returnval){ [ 730.968890] env[62522]: value = "task-2415232" [ 730.968890] env[62522]: _type = "Task" [ 730.968890] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.977297] env[62522]: DEBUG oslo_vmware.api [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Task: {'id': task-2415232, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.997901] env[62522]: DEBUG nova.network.neutron [req-6ab0ca19-348f-46d5-97c1-82bbe939e57f req-f73a3ce2-681f-491c-afbb-00252b26c5d8 service nova] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Updated VIF entry in instance network info cache for port 6fc5a3d3-c159-4c8e-978d-723aeeb43441. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 730.997901] env[62522]: DEBUG nova.network.neutron [req-6ab0ca19-348f-46d5-97c1-82bbe939e57f req-f73a3ce2-681f-491c-afbb-00252b26c5d8 service nova] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Updating instance_info_cache with network_info: [{"id": "6fc5a3d3-c159-4c8e-978d-723aeeb43441", "address": "fa:16:3e:43:6f:27", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fc5a3d3-c1", "ovs_interfaceid": "6fc5a3d3-c159-4c8e-978d-723aeeb43441", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.280426] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Releasing lock "refresh_cache-3824a70e-8498-410a-904d-c7cd0de0c358" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.280724] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Updated the network info_cache for instance {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 731.280827] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.280984] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.282127] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.282127] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None 
None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.282127] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.282127] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.282127] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62522) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 731.285340] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.293849] env[62522]: DEBUG oslo_vmware.api [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]528d3855-0e76-d5ad-fe48-436e713bb8cb, 'name': SearchDatastore_Task, 'duration_secs': 0.010306} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.295043] env[62522]: DEBUG oslo_concurrency.lockutils [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.295446] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 731.295634] env[62522]: DEBUG oslo_concurrency.lockutils [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 731.295789] env[62522]: DEBUG oslo_concurrency.lockutils [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.296162] env[62522]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 731.296467] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c8f4a9ab-6c51-4423-9835-ee27e48db8bb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.304586] env[62522]: DEBUG oslo_vmware.api [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415229, 'name': ReconfigVM_Task, 'duration_secs': 0.150905} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.305238] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 731.306178] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4228ba88-19d6-4417-bab5-60e9b60a8784 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.314046] env[62522]: DEBUG oslo_vmware.api [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 731.314046] env[62522]: value = "task-2415233" [ 731.314046] env[62522]: _type = "Task" [ 731.314046] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.315383] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a5008d07-d4ae-4dad-8909-4daec4e8a077 tempest-ServerShowV257Test-950868892 tempest-ServerShowV257Test-950868892-project-member] Lock "c73686c6-4dd8-4f00-a65a-5d8574409ad1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.031s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.316634] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 731.316810] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 731.321159] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0647841-a382-44f8-846c-fb81aa8178eb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.326641] env[62522]: DEBUG oslo_vmware.api [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Waiting for the task: (returnval){ [ 731.326641] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52cb2bbc-dff2-36a0-9635-1598745ef526" [ 731.326641] env[62522]: _type = "Task" [ 731.326641] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.335322] env[62522]: DEBUG oslo_vmware.api [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415233, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.345904] env[62522]: DEBUG oslo_vmware.api [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52cb2bbc-dff2-36a0-9635-1598745ef526, 'name': SearchDatastore_Task, 'duration_secs': 0.012827} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.348911] env[62522]: DEBUG oslo_vmware.api [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Task: {'id': task-2415230, 'name': Rename_Task, 'duration_secs': 0.15152} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.349243] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc1888ee-84f5-4adb-84da-84271ebb050e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.352378] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 731.353185] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f5e71049-a81c-48dd-b466-16c5334fc01c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.357586] env[62522]: DEBUG oslo_vmware.api [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Waiting for the task: (returnval){ [ 731.357586] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f3690f-e4e2-58f8-5ffc-c27804c0b039" [ 731.357586] env[62522]: _type = "Task" [ 731.357586] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.363062] env[62522]: DEBUG oslo_vmware.api [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Waiting for the task: (returnval){ [ 731.363062] env[62522]: value = "task-2415234" [ 731.363062] env[62522]: _type = "Task" [ 731.363062] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.366265] env[62522]: DEBUG oslo_vmware.api [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f3690f-e4e2-58f8-5ffc-c27804c0b039, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.376664] env[62522]: DEBUG oslo_vmware.api [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Task: {'id': task-2415234, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.479439] env[62522]: DEBUG oslo_vmware.api [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Task: {'id': task-2415232, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153884} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.479439] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 731.479616] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 731.479721] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 731.479834] env[62522]: INFO nova.compute.manager [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Took 1.11 seconds to destroy the instance on the hypervisor. [ 731.480088] env[62522]: DEBUG oslo.service.loopingcall [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 731.480288] env[62522]: DEBUG nova.compute.manager [-] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 731.480383] env[62522]: DEBUG nova.network.neutron [-] [instance: 8461f823-e48a-42f0-8863-44177565b82d] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 731.500454] env[62522]: DEBUG oslo_concurrency.lockutils [req-6ab0ca19-348f-46d5-97c1-82bbe939e57f req-f73a3ce2-681f-491c-afbb-00252b26c5d8 service nova] Releasing lock "refresh_cache-87a90c88-6e0a-4051-8978-b2f9c5a876ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.794728] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.830743] env[62522]: DEBUG oslo_vmware.api [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415233, 'name': PowerOnVM_Task, 'duration_secs': 0.399972} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.835024] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 731.839211] env[62522]: DEBUG nova.compute.manager [None req-dd1c2b5e-e876-461a-8d9a-cf6e7ea25462 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 731.839791] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ec990b-726f-47ad-8c0b-cca47c12ba4e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.870334] env[62522]: DEBUG oslo_vmware.api [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f3690f-e4e2-58f8-5ffc-c27804c0b039, 'name': SearchDatastore_Task, 'duration_secs': 0.022348} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.874684] env[62522]: DEBUG oslo_concurrency.lockutils [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.875309] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 87a90c88-6e0a-4051-8978-b2f9c5a876ca/87a90c88-6e0a-4051-8978-b2f9c5a876ca.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 731.878456] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f6087fc-3a62-48da-9ae9-b7e3dcefbff4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.886842] env[62522]: DEBUG oslo_vmware.api [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Task: {'id': task-2415234, 'name': PowerOnVM_Task, 'duration_secs': 0.472164} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.888875] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 731.889268] env[62522]: INFO nova.compute.manager [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Took 9.21 seconds to spawn the instance on the hypervisor. [ 731.889489] env[62522]: DEBUG nova.compute.manager [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 731.889834] env[62522]: DEBUG oslo_vmware.api [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Waiting for the task: (returnval){ [ 731.889834] env[62522]: value = "task-2415235" [ 731.889834] env[62522]: _type = "Task" [ 731.889834] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.891333] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c48d34a-8173-46e0-a8f7-7d8cf16880e3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.909607] env[62522]: DEBUG oslo_vmware.api [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2415235, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.266457] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e476015-8464-4d0d-aff1-dc5a59d50cd6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.279327] env[62522]: DEBUG nova.network.neutron [-] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.281702] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb4486c-5f01-4ae9-b080-6bb21ba6afb9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.322772] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bbe4522-5583-4b33-b590-ef0c5e2bbec0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.331714] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ec772f-1a1a-4160-aede-ddb698c51ff3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.350494] env[62522]: DEBUG nova.compute.provider_tree [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 732.402974] env[62522]: DEBUG oslo_vmware.api [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2415235, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.420133] env[62522]: INFO nova.compute.manager [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Took 41.13 seconds to build instance. 
[ 732.697654] env[62522]: DEBUG nova.compute.manager [req-c78ca655-fb8e-49a4-a56d-d7cc418a6a51 req-3a15e88f-3a94-4f13-94b7-cefb3a861a59 service nova] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Received event network-vif-deleted-284f1edc-1e35-4812-9696-c3ef34dfba09 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 732.789821] env[62522]: INFO nova.compute.manager [-] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Took 1.31 seconds to deallocate network for instance. [ 732.854269] env[62522]: DEBUG nova.scheduler.client.report [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 732.909127] env[62522]: DEBUG oslo_vmware.api [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2415235, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.893374} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.909328] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 87a90c88-6e0a-4051-8978-b2f9c5a876ca/87a90c88-6e0a-4051-8978-b2f9c5a876ca.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 732.909557] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 732.909944] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cd6142bf-bf74-4828-9fde-0992e6708703 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.918489] env[62522]: DEBUG oslo_vmware.api [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Waiting for the task: (returnval){ [ 732.918489] env[62522]: value = "task-2415236" [ 732.918489] env[62522]: _type = "Task" [ 732.918489] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.922429] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bd4eef58-7e96-45f2-931f-218e9a37866d tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Lock "ae3e55b8-00c1-4dae-9276-f46a1e17b80e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.925s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.931717] env[62522]: DEBUG oslo_vmware.api [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2415236, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.297693] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.358387] env[62522]: DEBUG oslo_concurrency.lockutils [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.598s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.358979] env[62522]: DEBUG nova.compute.manager [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 733.362041] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.354s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.363466] env[62522]: INFO nova.compute.claims [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 733.429808] env[62522]: DEBUG nova.compute.manager [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 733.433692] env[62522]: DEBUG oslo_vmware.api [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2415236, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.173957} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.433692] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 733.437902] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c03f9e0c-5b69-4288-bc59-d01e85a01df2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.464340] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Reconfiguring VM instance instance-0000001d to attach disk [datastore2] 87a90c88-6e0a-4051-8978-b2f9c5a876ca/87a90c88-6e0a-4051-8978-b2f9c5a876ca.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 733.464621] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45058c44-8cc1-49db-b370-0a5ffb78b22f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.486107] env[62522]: DEBUG oslo_vmware.api [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Waiting for the task: (returnval){ [ 733.486107] env[62522]: value = "task-2415237" [ 733.486107] env[62522]: _type = "Task" [ 733.486107] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.494530] env[62522]: DEBUG oslo_vmware.api [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2415237, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.869187] env[62522]: DEBUG nova.compute.utils [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 733.876039] env[62522]: DEBUG nova.compute.manager [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 733.876244] env[62522]: DEBUG nova.network.neutron [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 733.929423] env[62522]: DEBUG nova.policy [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '117741ff219249b5b5861807cd4d0326', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'faaea538586e4b93a78e5188cb096769', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 733.956680] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.999731] env[62522]: DEBUG oslo_vmware.api [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2415237, 'name': ReconfigVM_Task, 'duration_secs': 0.311427} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.999731] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Reconfigured VM instance instance-0000001d to attach disk [datastore2] 87a90c88-6e0a-4051-8978-b2f9c5a876ca/87a90c88-6e0a-4051-8978-b2f9c5a876ca.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 733.999731] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0381977c-a5af-4c3b-833f-161b5ffaadd4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.007524] env[62522]: DEBUG oslo_vmware.api [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Waiting for the task: (returnval){ [ 734.007524] env[62522]: value = "task-2415238" [ 734.007524] env[62522]: _type = "Task" [ 734.007524] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.018054] env[62522]: DEBUG oslo_vmware.api [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2415238, 'name': Rename_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.375306] env[62522]: DEBUG nova.compute.manager [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 734.400645] env[62522]: DEBUG nova.network.neutron [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Successfully created port: 1a476d07-4a16-4431-ba2e-bb302475c1f8 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 734.520985] env[62522]: DEBUG oslo_vmware.api [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2415238, 'name': Rename_Task, 'duration_secs': 0.161233} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.521301] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 734.521587] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-72bdf76b-eabb-4a7c-aa1f-be8930344953 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.531469] env[62522]: DEBUG oslo_vmware.api [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Waiting for the task: (returnval){ [ 734.531469] env[62522]: value = "task-2415239" [ 734.531469] env[62522]: _type = "Task" [ 734.531469] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.549282] env[62522]: DEBUG oslo_vmware.api [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2415239, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.831053] env[62522]: DEBUG nova.compute.manager [req-deb64f32-80ac-4b9c-a69b-a192a95889c5 req-7bee555f-c23b-4fde-99cd-103651ef09b3 service nova] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Received event network-changed-451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 734.831053] env[62522]: DEBUG nova.compute.manager [req-deb64f32-80ac-4b9c-a69b-a192a95889c5 req-7bee555f-c23b-4fde-99cd-103651ef09b3 service nova] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Refreshing instance network info cache due to event network-changed-451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 734.831053] env[62522]: DEBUG oslo_concurrency.lockutils [req-deb64f32-80ac-4b9c-a69b-a192a95889c5 req-7bee555f-c23b-4fde-99cd-103651ef09b3 service nova] Acquiring lock "refresh_cache-ae3e55b8-00c1-4dae-9276-f46a1e17b80e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.831053] env[62522]: DEBUG oslo_concurrency.lockutils [req-deb64f32-80ac-4b9c-a69b-a192a95889c5 req-7bee555f-c23b-4fde-99cd-103651ef09b3 service nova] Acquired lock "refresh_cache-ae3e55b8-00c1-4dae-9276-f46a1e17b80e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.831352] env[62522]: DEBUG nova.network.neutron [req-deb64f32-80ac-4b9c-a69b-a192a95889c5 req-7bee555f-c23b-4fde-99cd-103651ef09b3 service nova] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Refreshing network info cache for port 451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 734.978976] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d36f2769-ac29-4564-af90-fdae082d0df4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.987732] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfcd7b87-f1bf-4844-8435-992c42961d81 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.019282] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f70f59b1-f892-41ff-b4b9-5ebccc0946f4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.027470] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6e8df2-eaa6-4b54-891b-31fe91494abe {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.051937] env[62522]: DEBUG nova.compute.provider_tree [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 735.057794] env[62522]: DEBUG oslo_vmware.api [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2415239, 'name': PowerOnVM_Task, 'duration_secs': 0.503578} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.058313] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 735.059066] env[62522]: INFO nova.compute.manager [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Took 8.71 seconds to spawn the instance on the hypervisor. [ 735.059066] env[62522]: DEBUG nova.compute.manager [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 735.059771] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd707166-8e58-4f38-9a15-1ff3723fe54a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.384332] env[62522]: DEBUG nova.compute.manager [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 735.417802] env[62522]: DEBUG nova.virt.hardware [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 735.418079] env[62522]: DEBUG nova.virt.hardware [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 735.419265] env[62522]: DEBUG nova.virt.hardware [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 735.419265] env[62522]: DEBUG nova.virt.hardware [None 
req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 735.419265] env[62522]: DEBUG nova.virt.hardware [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 735.419265] env[62522]: DEBUG nova.virt.hardware [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 735.419265] env[62522]: DEBUG nova.virt.hardware [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 735.420312] env[62522]: DEBUG nova.virt.hardware [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 735.420506] env[62522]: DEBUG nova.virt.hardware [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 735.420678] env[62522]: DEBUG nova.virt.hardware [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 735.420849] env[62522]: DEBUG nova.virt.hardware [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 735.422045] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd7552b0-1dd7-4af1-a0f4-0f810d0e60a2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.430971] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da22ba58-990f-4c01-8fc0-7d5a60fba47d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.554826] env[62522]: INFO nova.compute.manager [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Rescuing [ 735.555369] env[62522]: DEBUG 
oslo_concurrency.lockutils [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "refresh_cache-c181ce48-9fe2-4400-9047-f8b5a7159dd3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 735.555545] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquired lock "refresh_cache-c181ce48-9fe2-4400-9047-f8b5a7159dd3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.555718] env[62522]: DEBUG nova.network.neutron [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 735.560117] env[62522]: DEBUG nova.scheduler.client.report [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 735.586579] env[62522]: INFO nova.compute.manager [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Took 42.05 seconds to build instance. [ 735.659969] env[62522]: DEBUG nova.network.neutron [req-deb64f32-80ac-4b9c-a69b-a192a95889c5 req-7bee555f-c23b-4fde-99cd-103651ef09b3 service nova] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Updated VIF entry in instance network info cache for port 451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 735.659969] env[62522]: DEBUG nova.network.neutron [req-deb64f32-80ac-4b9c-a69b-a192a95889c5 req-7bee555f-c23b-4fde-99cd-103651ef09b3 service nova] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Updating instance_info_cache with network_info: [{"id": "451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71", "address": "fa:16:3e:72:b3:7f", "network": {"id": "67803a10-3d20-412d-8239-f15c6c3e90db", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1068967527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c84fc4124fca4b12b4d3260601eeee83", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b49c5024-2ced-42ca-90cc-6066766d43e6", "external-id": "nsx-vlan-transportzone-239", "segmentation_id": 239, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap451cd3c2-ee", "ovs_interfaceid": "451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.066118] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.704s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.066654] env[62522]: DEBUG nova.compute.manager [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 736.070931] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 35.673s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 736.090467] env[62522]: DEBUG oslo_concurrency.lockutils [None req-538af348-2d05-4886-97ce-b9d077434bcd tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Lock "87a90c88-6e0a-4051-8978-b2f9c5a876ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.536s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.163528] env[62522]: DEBUG oslo_concurrency.lockutils [req-deb64f32-80ac-4b9c-a69b-a192a95889c5 req-7bee555f-c23b-4fde-99cd-103651ef09b3 service nova] Releasing lock "refresh_cache-ae3e55b8-00c1-4dae-9276-f46a1e17b80e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 736.559072] env[62522]: DEBUG nova.network.neutron [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Updating instance_info_cache with network_info: [{"id": "2e33c70f-036d-459c-a393-f570cbf7089c", "address": "fa:16:3e:58:55:4e", "network": {"id": "5f1d73d1-ff9e-4081-87cf-8df6294f67c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-892212702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "962664c996f24cf9ae192f79fae18ca4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "419a5b3f-4c6f-4168-9def-746b4d8c5c24", "external-id": "nsx-vlan-transportzone-656", "segmentation_id": 656, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e33c70f-03", "ovs_interfaceid": "2e33c70f-036d-459c-a393-f570cbf7089c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.572919] env[62522]: DEBUG nova.compute.utils [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 736.577662] env[62522]: DEBUG nova.compute.manager [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 736.577662] env[62522]: DEBUG nova.network.neutron [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 736.581454] env[62522]: INFO nova.compute.claims [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 736.593839] env[62522]: DEBUG nova.compute.manager [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 736.597421] env[62522]: DEBUG nova.network.neutron [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Successfully updated port: 1a476d07-4a16-4431-ba2e-bb302475c1f8 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 736.696594] env[62522]: DEBUG nova.policy [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '92852ca5b2214597818755ae79f626aa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '093c35d4ac794641ba9e8b51e6e3cb7a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 737.068415] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Releasing lock "refresh_cache-c181ce48-9fe2-4400-9047-f8b5a7159dd3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.075876] env[62522]: DEBUG nova.compute.manager [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 737.089013] env[62522]: INFO nova.compute.resource_tracker [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Updating resource usage from migration 232d6454-4d48-4be8-bf2c-81a73db59aa9 [ 737.102865] env[62522]: DEBUG oslo_concurrency.lockutils [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Acquiring lock "refresh_cache-194c1dd8-3b0a-4c29-9779-65f1534121d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.102865] env[62522]: DEBUG oslo_concurrency.lockutils [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Acquired lock "refresh_cache-194c1dd8-3b0a-4c29-9779-65f1534121d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.102865] env[62522]: DEBUG nova.network.neutron [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 737.129052] env[62522]: DEBUG oslo_concurrency.lockutils [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.244475] env[62522]: DEBUG nova.compute.manager [req-7131b870-e5e7-464b-9793-cc6f5cb91269 req-12f54b5b-1330-4e15-b23f-0cd8067f6035 service nova] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Received event network-vif-plugged-1a476d07-4a16-4431-ba2e-bb302475c1f8 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 737.244766] env[62522]: DEBUG oslo_concurrency.lockutils [req-7131b870-e5e7-464b-9793-cc6f5cb91269 req-12f54b5b-1330-4e15-b23f-0cd8067f6035 service nova] Acquiring lock "194c1dd8-3b0a-4c29-9779-65f1534121d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.245340] env[62522]: DEBUG oslo_concurrency.lockutils [req-7131b870-e5e7-464b-9793-cc6f5cb91269 req-12f54b5b-1330-4e15-b23f-0cd8067f6035 service nova] Lock "194c1dd8-3b0a-4c29-9779-65f1534121d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.245603] env[62522]: DEBUG oslo_concurrency.lockutils [req-7131b870-e5e7-464b-9793-cc6f5cb91269 req-12f54b5b-1330-4e15-b23f-0cd8067f6035 service nova] Lock "194c1dd8-3b0a-4c29-9779-65f1534121d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.248117] 
env[62522]: DEBUG nova.compute.manager [req-7131b870-e5e7-464b-9793-cc6f5cb91269 req-12f54b5b-1330-4e15-b23f-0cd8067f6035 service nova] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] No waiting events found dispatching network-vif-plugged-1a476d07-4a16-4431-ba2e-bb302475c1f8 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 737.248356] env[62522]: WARNING nova.compute.manager [req-7131b870-e5e7-464b-9793-cc6f5cb91269 req-12f54b5b-1330-4e15-b23f-0cd8067f6035 service nova] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Received unexpected event network-vif-plugged-1a476d07-4a16-4431-ba2e-bb302475c1f8 for instance with vm_state building and task_state spawning. [ 737.248491] env[62522]: DEBUG nova.compute.manager [req-7131b870-e5e7-464b-9793-cc6f5cb91269 req-12f54b5b-1330-4e15-b23f-0cd8067f6035 service nova] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Received event network-changed-1a476d07-4a16-4431-ba2e-bb302475c1f8 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 737.248683] env[62522]: DEBUG nova.compute.manager [req-7131b870-e5e7-464b-9793-cc6f5cb91269 req-12f54b5b-1330-4e15-b23f-0cd8067f6035 service nova] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Refreshing instance network info cache due to event network-changed-1a476d07-4a16-4431-ba2e-bb302475c1f8. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 737.248850] env[62522]: DEBUG oslo_concurrency.lockutils [req-7131b870-e5e7-464b-9793-cc6f5cb91269 req-12f54b5b-1330-4e15-b23f-0cd8067f6035 service nova] Acquiring lock "refresh_cache-194c1dd8-3b0a-4c29-9779-65f1534121d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.319381] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Acquiring lock "87a90c88-6e0a-4051-8978-b2f9c5a876ca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.319381] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Lock "87a90c88-6e0a-4051-8978-b2f9c5a876ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.319381] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Acquiring lock "87a90c88-6e0a-4051-8978-b2f9c5a876ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.319381] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Lock "87a90c88-6e0a-4051-8978-b2f9c5a876ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.319710] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Lock "87a90c88-6e0a-4051-8978-b2f9c5a876ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.324905] env[62522]: INFO nova.compute.manager [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Terminating instance [ 737.439224] env[62522]: DEBUG nova.network.neutron [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Successfully created port: 00fd23e6-10da-4963-a366-1bec61020dd9 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 737.680963] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a5d2a1-b3be-45d8-905d-7447a516d256 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.689029] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a6ed0fe-42fe-4b8e-a5c6-3baceda33047 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.725725] env[62522]: DEBUG nova.network.neutron [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 737.728951] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd0f7faa-03a0-4660-8bd8-34dce2a27eaf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.736750] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df9aa51-4605-4f46-b3bf-31304b0d0512 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.754423] env[62522]: DEBUG nova.compute.provider_tree [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 737.831802] env[62522]: DEBUG nova.compute.manager [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 737.832050] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 737.833238] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b54a99dd-0ba4-4872-bf10-afbb4f096c1e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.841489] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 737.841800] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d56c843-0617-44ab-9174-949699da18a8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.848310] env[62522]: DEBUG oslo_vmware.api [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Waiting for the task: (returnval){ [ 737.848310] env[62522]: value = "task-2415240" [ 737.848310] env[62522]: _type = "Task" [ 737.848310] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.856416] env[62522]: DEBUG oslo_vmware.api [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2415240, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.091929] env[62522]: DEBUG nova.compute.manager [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 738.116474] env[62522]: DEBUG nova.network.neutron [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Updating instance_info_cache with network_info: [{"id": "1a476d07-4a16-4431-ba2e-bb302475c1f8", "address": "fa:16:3e:5b:14:fc", "network": {"id": "00774907-e17b-4b92-a84f-d66005fed67a", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-119143966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "faaea538586e4b93a78e5188cb096769", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba", "external-id": "nsx-vlan-transportzone-386", "segmentation_id": 386, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a476d07-4a", "ovs_interfaceid": "1a476d07-4a16-4431-ba2e-bb302475c1f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.122673] env[62522]: DEBUG nova.virt.hardware [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 738.122673] env[62522]: DEBUG nova.virt.hardware [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 738.122673] env[62522]: DEBUG nova.virt.hardware [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 738.122859] env[62522]: DEBUG nova.virt.hardware [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Flavor pref 0:0:0 {{(pid=62522) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 738.123045] env[62522]: DEBUG nova.virt.hardware [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 738.123218] env[62522]: DEBUG nova.virt.hardware [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 738.123431] env[62522]: DEBUG nova.virt.hardware [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 738.123590] env[62522]: DEBUG nova.virt.hardware [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 738.123779] env[62522]: DEBUG nova.virt.hardware [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 738.123985] env[62522]: DEBUG nova.virt.hardware [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 738.124197] env[62522]: DEBUG nova.virt.hardware [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 738.125372] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b0fbdd-f2b9-47f7-be63-eb3968ee820d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.135341] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ad97ad-a72e-430d-ab39-aba2472af40f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.258290] env[62522]: DEBUG nova.scheduler.client.report [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 738.359297] env[62522]: DEBUG oslo_vmware.api [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2415240, 'name': PowerOffVM_Task, 'duration_secs': 0.196917} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.359608] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 738.359942] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 738.361102] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fdc22d3b-4feb-4ada-a7b3-c270fd7ceece {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.421908] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 738.422739] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 738.422739] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Deleting the datastore file [datastore2] 87a90c88-6e0a-4051-8978-b2f9c5a876ca {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 738.422739] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-23edbd04-058f-49e0-85d1-1910465ed207 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.431655] env[62522]: DEBUG oslo_vmware.api [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Waiting for the task: (returnval){ [ 738.431655] env[62522]: value = "task-2415242" [ 738.431655] env[62522]: _type = "Task" [ 738.431655] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.441829] env[62522]: DEBUG oslo_vmware.api [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2415242, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.618369] env[62522]: DEBUG oslo_concurrency.lockutils [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Releasing lock "refresh_cache-194c1dd8-3b0a-4c29-9779-65f1534121d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.618777] env[62522]: DEBUG nova.compute.manager [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Instance network_info: |[{"id": "1a476d07-4a16-4431-ba2e-bb302475c1f8", "address": "fa:16:3e:5b:14:fc", "network": {"id": "00774907-e17b-4b92-a84f-d66005fed67a", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-119143966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "faaea538586e4b93a78e5188cb096769", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba", "external-id": "nsx-vlan-transportzone-386", "segmentation_id": 386, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a476d07-4a", "ovs_interfaceid": "1a476d07-4a16-4431-ba2e-bb302475c1f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 738.619111] env[62522]: DEBUG oslo_concurrency.lockutils [req-7131b870-e5e7-464b-9793-cc6f5cb91269 req-12f54b5b-1330-4e15-b23f-0cd8067f6035 service nova] Acquired lock "refresh_cache-194c1dd8-3b0a-4c29-9779-65f1534121d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.619305] env[62522]: DEBUG nova.network.neutron [req-7131b870-e5e7-464b-9793-cc6f5cb91269 req-12f54b5b-1330-4e15-b23f-0cd8067f6035 service nova] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Refreshing network info cache for port 1a476d07-4a16-4431-ba2e-bb302475c1f8 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 738.621355] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:14:fc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4813d311-2016-4f6a-a4b8-7613ab624fba', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1a476d07-4a16-4431-ba2e-bb302475c1f8', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 738.629203] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Creating folder: Project (faaea538586e4b93a78e5188cb096769). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 738.631404] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 738.631404] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9cee7c61-c358-40d3-9060-b3b61e65413f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.632802] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a1711db3-52c1-49fe-a079-a7677d1f893e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.640581] env[62522]: DEBUG oslo_vmware.api [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 738.640581] env[62522]: value = "task-2415244" [ 738.640581] env[62522]: _type = "Task" [ 738.640581] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.645935] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Created folder: Project (faaea538586e4b93a78e5188cb096769) in parent group-v489562. [ 738.646162] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Creating folder: Instances. Parent ref: group-v489644. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 738.650339] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e09a3923-a376-465b-b87d-012cdb5ecdc7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.652549] env[62522]: DEBUG oslo_vmware.api [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415244, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.661245] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Created folder: Instances in parent group-v489644. 
[ 738.661555] env[62522]: DEBUG oslo.service.loopingcall [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 738.661762] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 738.661971] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fd3df94b-21d1-4895-afe0-4d2eb9da3fa6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.681725] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 738.681725] env[62522]: value = "task-2415246" [ 738.681725] env[62522]: _type = "Task" [ 738.681725] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.690741] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415246, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.769104] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.698s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.770060] env[62522]: INFO nova.compute.manager [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Migrating [ 738.770060] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.770060] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquired lock "compute-rpcapi-router" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.771417] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.552s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.773405] env[62522]: INFO nova.compute.claims [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 738.783315] env[62522]: 
INFO nova.compute.rpcapi [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 738.783951] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Releasing lock "compute-rpcapi-router" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.813297] env[62522]: DEBUG oslo_concurrency.lockutils [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Acquiring lock "7a086314-3e49-48e9-82c9-cead8ecb19d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.813561] env[62522]: DEBUG oslo_concurrency.lockutils [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Lock "7a086314-3e49-48e9-82c9-cead8ecb19d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.944160] env[62522]: DEBUG oslo_vmware.api [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Task: {'id': task-2415242, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.348582} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.944845] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 738.945183] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 738.945673] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 738.945929] env[62522]: INFO nova.compute.manager [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 738.946236] env[62522]: DEBUG oslo.service.loopingcall [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 738.946631] env[62522]: DEBUG nova.compute.manager [-] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 738.946770] env[62522]: DEBUG nova.network.neutron [-] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 739.155054] env[62522]: DEBUG oslo_vmware.api [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415244, 'name': PowerOffVM_Task, 'duration_secs': 0.211503} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.155353] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 739.156177] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f66b120-cf32-4e50-a650-a7bd81504924 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.181407] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb885ede-f788-4322-9abc-980b164eaf1a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.197128] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415246, 'name': CreateVM_Task, 'duration_secs': 0.365219} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.197441] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 739.198055] env[62522]: DEBUG oslo_concurrency.lockutils [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.198222] env[62522]: DEBUG oslo_concurrency.lockutils [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.198762] env[62522]: DEBUG oslo_concurrency.lockutils [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 739.199013] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2836461a-106c-4e7f-b198-87111f934a14 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.209797] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Waiting for the task: (returnval){ [ 739.209797] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5293958a-e377-b894-5c5e-f10fa42ac12b" [ 739.209797] env[62522]: _type = "Task" [ 739.209797] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.218294] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5293958a-e377-b894-5c5e-f10fa42ac12b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.237961] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 739.238577] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1ffa8f8e-ecdc-4f6c-9e5b-bbb052db1b64 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.248108] env[62522]: DEBUG oslo_vmware.api [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 739.248108] env[62522]: value = "task-2415247" [ 739.248108] env[62522]: _type = "Task" [ 739.248108] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.257593] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] VM already powered off {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 739.257837] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 739.258402] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.258402] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.258534] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 739.258769] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-857b2a3a-3e4e-4d66-a4f5-1dfb8ed33346 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.267741] env[62522]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 739.267891] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 739.269046] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19e70e40-258b-41e6-9062-e401f60a035b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.275365] env[62522]: DEBUG nova.compute.manager [req-2bda7d97-f825-4055-bd1a-3c41b1827719 req-a623182b-2d7d-4c2c-ad0a-7a825bb5da1e service nova] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Received event network-vif-deleted-6fc5a3d3-c159-4c8e-978d-723aeeb43441 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 739.275609] env[62522]: INFO nova.compute.manager [req-2bda7d97-f825-4055-bd1a-3c41b1827719 req-a623182b-2d7d-4c2c-ad0a-7a825bb5da1e service nova] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Neutron deleted interface 6fc5a3d3-c159-4c8e-978d-723aeeb43441; detaching it from the instance and deleting it from the info cache [ 739.276042] env[62522]: DEBUG nova.network.neutron [req-2bda7d97-f825-4055-bd1a-3c41b1827719 req-a623182b-2d7d-4c2c-ad0a-7a825bb5da1e service nova] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.281385] env[62522]: DEBUG oslo_vmware.api [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 739.281385] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c3b2b0-17ec-e32a-9b48-f3546052fb16" [ 739.281385] env[62522]: _type = "Task" [ 739.281385] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.296567] env[62522]: DEBUG oslo_vmware.api [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c3b2b0-17ec-e32a-9b48-f3546052fb16, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.306790] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "refresh_cache-879354d3-7423-41e2-93f6-0d8d3a120170" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.306971] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquired lock "refresh_cache-879354d3-7423-41e2-93f6-0d8d3a120170" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.307397] env[62522]: DEBUG nova.network.neutron [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 739.411138] env[62522]: DEBUG nova.network.neutron [req-7131b870-e5e7-464b-9793-cc6f5cb91269 req-12f54b5b-1330-4e15-b23f-0cd8067f6035 service nova] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Updated VIF entry in instance network info cache for port 1a476d07-4a16-4431-ba2e-bb302475c1f8. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 739.411540] env[62522]: DEBUG nova.network.neutron [req-7131b870-e5e7-464b-9793-cc6f5cb91269 req-12f54b5b-1330-4e15-b23f-0cd8067f6035 service nova] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Updating instance_info_cache with network_info: [{"id": "1a476d07-4a16-4431-ba2e-bb302475c1f8", "address": "fa:16:3e:5b:14:fc", "network": {"id": "00774907-e17b-4b92-a84f-d66005fed67a", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-119143966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "faaea538586e4b93a78e5188cb096769", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba", "external-id": "nsx-vlan-transportzone-386", "segmentation_id": 386, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a476d07-4a", "ovs_interfaceid": "1a476d07-4a16-4431-ba2e-bb302475c1f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.604115] env[62522]: DEBUG nova.network.neutron [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Successfully updated port: 00fd23e6-10da-4963-a366-1bec61020dd9 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 739.711187] env[62522]: DEBUG nova.network.neutron [-] 
[instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.724430] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5293958a-e377-b894-5c5e-f10fa42ac12b, 'name': SearchDatastore_Task, 'duration_secs': 0.012189} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.724733] env[62522]: DEBUG oslo_concurrency.lockutils [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.724957] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 739.725236] env[62522]: DEBUG oslo_concurrency.lockutils [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.725379] env[62522]: DEBUG oslo_concurrency.lockutils [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.725554] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 739.726083] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8cec07ce-12e1-4ef5-a858-7ba73a127cc1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.734228] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 739.734402] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 739.735223] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d008433-f494-49e8-8c41-f184b6d91018 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.742937] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Waiting for the task: (returnval){ [ 739.742937] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a40c57-10f4-3407-31c7-aadaad1d3456" [ 739.742937] env[62522]: _type = "Task" [ 739.742937] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.751695] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a40c57-10f4-3407-31c7-aadaad1d3456, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.780144] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f03f6d7b-3542-45f4-b3d9-5277bd8656ad {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.792802] env[62522]: DEBUG oslo_vmware.api [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c3b2b0-17ec-e32a-9b48-f3546052fb16, 'name': SearchDatastore_Task, 'duration_secs': 0.012582} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.795029] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5b1ea95-fcbc-4367-8d5f-2c84e199a5ec {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.799082] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973e0df4-1f6b-4c10-ab12-a1f2c3130255 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.818726] env[62522]: DEBUG oslo_vmware.api [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 739.818726] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5244b3f2-5170-19ee-1797-b72150f5edbb" [ 739.818726] env[62522]: _type = "Task" [ 739.818726] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.826805] env[62522]: DEBUG oslo_vmware.api [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5244b3f2-5170-19ee-1797-b72150f5edbb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.836328] env[62522]: DEBUG nova.compute.manager [req-2bda7d97-f825-4055-bd1a-3c41b1827719 req-a623182b-2d7d-4c2c-ad0a-7a825bb5da1e service nova] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Detach interface failed, port_id=6fc5a3d3-c159-4c8e-978d-723aeeb43441, reason: Instance 87a90c88-6e0a-4051-8978-b2f9c5a876ca could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 739.915675] env[62522]: DEBUG oslo_concurrency.lockutils [req-7131b870-e5e7-464b-9793-cc6f5cb91269 req-12f54b5b-1330-4e15-b23f-0cd8067f6035 service nova] Releasing lock "refresh_cache-194c1dd8-3b0a-4c29-9779-65f1534121d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.106851] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Acquiring lock "refresh_cache-5b69254a-b34b-48ff-a96c-d8573c9abf3b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 740.107103] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Acquired lock "refresh_cache-5b69254a-b34b-48ff-a96c-d8573c9abf3b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.107320] env[62522]: DEBUG nova.network.neutron [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 740.177611] env[62522]: DEBUG nova.network.neutron [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Updating instance_info_cache with network_info: [{"id": "cd619060-5655-434c-967f-7552adca021b", "address": "fa:16:3e:7d:62:dc", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd619060-56", "ovs_interfaceid": "cd619060-5655-434c-967f-7552adca021b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.215721] env[62522]: INFO nova.compute.manager 
[-] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Took 1.27 seconds to deallocate network for instance. [ 740.253547] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a40c57-10f4-3407-31c7-aadaad1d3456, 'name': SearchDatastore_Task, 'duration_secs': 0.009585} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.254414] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-694be1fc-b0af-4078-80bc-6c6a8fe8fa63 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.260266] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Waiting for the task: (returnval){ [ 740.260266] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52259d90-52cf-fa83-df28-7dfbec9f68fa" [ 740.260266] env[62522]: _type = "Task" [ 740.260266] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.271142] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52259d90-52cf-fa83-df28-7dfbec9f68fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.329461] env[62522]: DEBUG oslo_vmware.api [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5244b3f2-5170-19ee-1797-b72150f5edbb, 'name': SearchDatastore_Task, 'duration_secs': 0.00845} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.331801] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.332087] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] c181ce48-9fe2-4400-9047-f8b5a7159dd3/2ee4561b-ba48-4f45-82f6-eac89be98290-rescue.vmdk. 
{{(pid=62522) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 740.332518] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d255c9ce-00eb-4dd2-8468-615b3c12db85 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.339432] env[62522]: DEBUG oslo_vmware.api [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 740.339432] env[62522]: value = "task-2415248" [ 740.339432] env[62522]: _type = "Task" [ 740.339432] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.349684] env[62522]: DEBUG oslo_vmware.api [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415248, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.393889] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-874472bb-3d8d-4c5c-bc35-29f2d5ba26db {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.401672] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b8f36f-e53f-44bc-aa57-034026af64e6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.432972] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b66d488-6993-4717-a937-b3ea36180341 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.440562] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-745c1dcf-e234-4c0f-b66c-2448687dba7b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.453909] env[62522]: DEBUG nova.compute.provider_tree [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 740.674732] env[62522]: DEBUG nova.network.neutron [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 740.680636] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Releasing lock "refresh_cache-879354d3-7423-41e2-93f6-0d8d3a120170" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.726780] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.774562] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52259d90-52cf-fa83-df28-7dfbec9f68fa, 'name': SearchDatastore_Task, 'duration_secs': 0.0103} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.774888] env[62522]: DEBUG oslo_concurrency.lockutils [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.775258] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 194c1dd8-3b0a-4c29-9779-65f1534121d1/194c1dd8-3b0a-4c29-9779-65f1534121d1.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 740.775575] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3166c227-745e-437f-b693-446c853de316 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.786643] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Waiting for the task: (returnval){ [ 740.786643] env[62522]: value = "task-2415249" [ 740.786643] env[62522]: _type = "Task" [ 740.786643] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.795556] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Task: {'id': task-2415249, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.851628] env[62522]: DEBUG oslo_vmware.api [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415248, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.931402] env[62522]: DEBUG nova.network.neutron [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Updating instance_info_cache with network_info: [{"id": "00fd23e6-10da-4963-a366-1bec61020dd9", "address": "fa:16:3e:1d:67:f2", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.46", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00fd23e6-10", "ovs_interfaceid": "00fd23e6-10da-4963-a366-1bec61020dd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.958106] env[62522]: DEBUG nova.scheduler.client.report [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 741.295889] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Task: {'id': task-2415249, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.350528] env[62522]: DEBUG oslo_vmware.api [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415248, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.827365} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.350797] env[62522]: INFO nova.virt.vmwareapi.ds_util [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] c181ce48-9fe2-4400-9047-f8b5a7159dd3/2ee4561b-ba48-4f45-82f6-eac89be98290-rescue.vmdk. [ 741.351741] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c15c5ca-dbb2-42ce-bddc-b269a607f010 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.378152] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Reconfiguring VM instance instance-00000016 to attach disk [datastore2] c181ce48-9fe2-4400-9047-f8b5a7159dd3/2ee4561b-ba48-4f45-82f6-eac89be98290-rescue.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 741.378483] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0100c13-1974-4509-a118-3eb10bcf29b8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.397656] env[62522]: DEBUG oslo_vmware.api [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 741.397656] env[62522]: value = "task-2415250" [ 741.397656] env[62522]: _type = "Task" [ 741.397656] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.407019] env[62522]: DEBUG oslo_vmware.api [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415250, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.434019] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Releasing lock "refresh_cache-5b69254a-b34b-48ff-a96c-d8573c9abf3b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.434539] env[62522]: DEBUG nova.compute.manager [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Instance network_info: |[{"id": "00fd23e6-10da-4963-a366-1bec61020dd9", "address": "fa:16:3e:1d:67:f2", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.46", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00fd23e6-10", "ovs_interfaceid": "00fd23e6-10da-4963-a366-1bec61020dd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 741.435484] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:67:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f6d1427-d86b-4371-9172-50e4bb0eb1cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '00fd23e6-10da-4963-a366-1bec61020dd9', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 741.446949] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Creating folder: Project (093c35d4ac794641ba9e8b51e6e3cb7a). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 741.447440] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-96b10037-ac24-4e6e-be04-a79edcaaeaa0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.458868] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Created folder: Project (093c35d4ac794641ba9e8b51e6e3cb7a) in parent group-v489562. 
[ 741.458868] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Creating folder: Instances. Parent ref: group-v489647. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 741.460040] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-10927b7e-be60-4a30-921a-34516542828a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.464609] env[62522]: DEBUG nova.compute.manager [req-567e16d6-285a-4968-9ab0-2e202c67b1db req-7f0fdc9d-cd59-47a0-8907-74e31a86a1b5 service nova] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Received event network-vif-plugged-00fd23e6-10da-4963-a366-1bec61020dd9 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 741.464900] env[62522]: DEBUG oslo_concurrency.lockutils [req-567e16d6-285a-4968-9ab0-2e202c67b1db req-7f0fdc9d-cd59-47a0-8907-74e31a86a1b5 service nova] Acquiring lock "5b69254a-b34b-48ff-a96c-d8573c9abf3b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.465293] env[62522]: DEBUG oslo_concurrency.lockutils [req-567e16d6-285a-4968-9ab0-2e202c67b1db req-7f0fdc9d-cd59-47a0-8907-74e31a86a1b5 service nova] Lock "5b69254a-b34b-48ff-a96c-d8573c9abf3b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 741.465477] env[62522]: DEBUG oslo_concurrency.lockutils [req-567e16d6-285a-4968-9ab0-2e202c67b1db req-7f0fdc9d-cd59-47a0-8907-74e31a86a1b5 service nova] Lock "5b69254a-b34b-48ff-a96c-d8573c9abf3b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 741.465693] env[62522]: DEBUG nova.compute.manager [req-567e16d6-285a-4968-9ab0-2e202c67b1db req-7f0fdc9d-cd59-47a0-8907-74e31a86a1b5 service nova] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] No waiting events found dispatching network-vif-plugged-00fd23e6-10da-4963-a366-1bec61020dd9 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 741.465990] env[62522]: WARNING nova.compute.manager [req-567e16d6-285a-4968-9ab0-2e202c67b1db req-7f0fdc9d-cd59-47a0-8907-74e31a86a1b5 service nova] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Received unexpected event network-vif-plugged-00fd23e6-10da-4963-a366-1bec61020dd9 for instance with vm_state building and task_state spawning. [ 741.466202] env[62522]: DEBUG nova.compute.manager [req-567e16d6-285a-4968-9ab0-2e202c67b1db req-7f0fdc9d-cd59-47a0-8907-74e31a86a1b5 service nova] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Received event network-changed-00fd23e6-10da-4963-a366-1bec61020dd9 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 741.466360] env[62522]: DEBUG nova.compute.manager [req-567e16d6-285a-4968-9ab0-2e202c67b1db req-7f0fdc9d-cd59-47a0-8907-74e31a86a1b5 service nova] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Refreshing instance network info cache due to event network-changed-00fd23e6-10da-4963-a366-1bec61020dd9. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 741.466595] env[62522]: DEBUG oslo_concurrency.lockutils [req-567e16d6-285a-4968-9ab0-2e202c67b1db req-7f0fdc9d-cd59-47a0-8907-74e31a86a1b5 service nova] Acquiring lock "refresh_cache-5b69254a-b34b-48ff-a96c-d8573c9abf3b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.467015] env[62522]: DEBUG oslo_concurrency.lockutils [req-567e16d6-285a-4968-9ab0-2e202c67b1db req-7f0fdc9d-cd59-47a0-8907-74e31a86a1b5 service nova] Acquired lock "refresh_cache-5b69254a-b34b-48ff-a96c-d8573c9abf3b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.467015] env[62522]: DEBUG nova.network.neutron [req-567e16d6-285a-4968-9ab0-2e202c67b1db req-7f0fdc9d-cd59-47a0-8907-74e31a86a1b5 service nova] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Refreshing network info cache for port 00fd23e6-10da-4963-a366-1bec61020dd9 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 741.470087] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.699s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 741.471029] env[62522]: DEBUG nova.compute.manager [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 741.478065] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.595s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 741.478065] env[62522]: INFO nova.compute.claims [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 741.491971] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Created folder: Instances in parent group-v489647. [ 741.491971] env[62522]: DEBUG oslo.service.loopingcall [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 741.491971] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 741.491971] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-31b83726-7de3-41bc-88b6-73dfb9cab801 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.512787] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 741.512787] env[62522]: value = "task-2415253" [ 741.512787] env[62522]: _type = "Task" [ 741.512787] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.524208] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415253, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.799378] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Task: {'id': task-2415249, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.011184} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.799755] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 194c1dd8-3b0a-4c29-9779-65f1534121d1/194c1dd8-3b0a-4c29-9779-65f1534121d1.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 741.799913] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 741.800196] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-040e759a-0eff-4f3e-9f4d-5b00f6cbd693 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.806887] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Waiting for the task: (returnval){ [ 741.806887] env[62522]: value = "task-2415254" [ 741.806887] env[62522]: _type = "Task" [ 741.806887] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.815727] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Task: {'id': task-2415254, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.909303] env[62522]: DEBUG oslo_vmware.api [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415250, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.987540] env[62522]: DEBUG nova.compute.utils [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 741.989390] env[62522]: DEBUG nova.compute.manager [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 741.989671] env[62522]: DEBUG nova.network.neutron [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 742.027099] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415253, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.036032] env[62522]: DEBUG nova.policy [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c6cd8ac5e524880bd4fc6a373dd35fc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0dba307f1fbf48bfac98d9836a72254e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 742.201789] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda91e5b-0278-4c15-ad9f-75bb9c024765 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.220277] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Updating instance '879354d3-7423-41e2-93f6-0d8d3a120170' progress to 0 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 742.268972] env[62522]: DEBUG nova.network.neutron [req-567e16d6-285a-4968-9ab0-2e202c67b1db req-7f0fdc9d-cd59-47a0-8907-74e31a86a1b5 service nova] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Updated VIF entry in instance network info cache for port 00fd23e6-10da-4963-a366-1bec61020dd9. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 742.268972] env[62522]: DEBUG nova.network.neutron [req-567e16d6-285a-4968-9ab0-2e202c67b1db req-7f0fdc9d-cd59-47a0-8907-74e31a86a1b5 service nova] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Updating instance_info_cache with network_info: [{"id": "00fd23e6-10da-4963-a366-1bec61020dd9", "address": "fa:16:3e:1d:67:f2", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.46", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00fd23e6-10", "ovs_interfaceid": "00fd23e6-10da-4963-a366-1bec61020dd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.322293] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Task: {'id': task-2415254, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.217161} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.322350] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 742.323203] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66365c8-0010-41e0-8818-0c42cb86319a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.348567] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] 194c1dd8-3b0a-4c29-9779-65f1534121d1/194c1dd8-3b0a-4c29-9779-65f1534121d1.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 742.348885] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c17f1bf-eb49-4989-a568-8405545b84b5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.372997] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Waiting for the task: (returnval){ [ 742.372997] env[62522]: value = "task-2415255" [ 742.372997] env[62522]: _type = "Task" [ 742.372997] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.383020] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Task: {'id': task-2415255, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.384364] env[62522]: DEBUG nova.network.neutron [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Successfully created port: e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 742.409366] env[62522]: DEBUG oslo_vmware.api [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415250, 'name': ReconfigVM_Task, 'duration_secs': 0.678076} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.409659] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Reconfigured VM instance instance-00000016 to attach disk [datastore2] c181ce48-9fe2-4400-9047-f8b5a7159dd3/2ee4561b-ba48-4f45-82f6-eac89be98290-rescue.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 742.410594] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-247e509d-cda9-45b4-8701-9bce3d5807c2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.436077] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f59b9b6f-3e96-443a-a992-2f7582e31bfb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.453075] env[62522]: DEBUG oslo_vmware.api [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 742.453075] env[62522]: value = "task-2415256" [ 742.453075] env[62522]: _type = "Task" [ 742.453075] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.461275] env[62522]: DEBUG oslo_vmware.api [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415256, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.493112] env[62522]: DEBUG nova.compute.manager [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 742.527974] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415253, 'name': CreateVM_Task, 'duration_secs': 0.711045} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.529416] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 742.529416] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.529933] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.530829] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 742.531275] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a4aa17e-283b-4a6d-af2c-f3ba523f5ace {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.536011] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Waiting for the task: (returnval){ [ 742.536011] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5218e8be-2f94-6d61-e8bb-3a9b63602896" [ 742.536011] env[62522]: _type = "Task" [ 742.536011] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.545821] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5218e8be-2f94-6d61-e8bb-3a9b63602896, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.726218] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 742.726508] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b2afdd1a-c960-417b-a848-3d0fc0346903 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.735894] env[62522]: DEBUG oslo_vmware.api [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 742.735894] env[62522]: value = "task-2415257" [ 742.735894] env[62522]: _type = "Task" [ 742.735894] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.746033] env[62522]: DEBUG oslo_vmware.api [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415257, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.771569] env[62522]: DEBUG oslo_concurrency.lockutils [req-567e16d6-285a-4968-9ab0-2e202c67b1db req-7f0fdc9d-cd59-47a0-8907-74e31a86a1b5 service nova] Releasing lock "refresh_cache-5b69254a-b34b-48ff-a96c-d8573c9abf3b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.883931] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Task: {'id': task-2415255, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.955355] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e32469-58a6-48fe-b145-c943e4fa6b84 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.969459] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b9fc54c-8888-46aa-8571-bb986e2bfa14 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.972705] env[62522]: DEBUG oslo_vmware.api [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415256, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.008149] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f09026bb-9c64-40b7-ab2a-184c024bd0c1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.016208] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d18215-f22c-4f36-ae49-cf62ca4117b1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.033098] env[62522]: DEBUG nova.compute.provider_tree [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 743.049534] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5218e8be-2f94-6d61-e8bb-3a9b63602896, 'name': SearchDatastore_Task, 'duration_secs': 0.011314} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.049994] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.050364] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 743.050727] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.050975] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.051291] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 743.051874] env[62522]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-62bd70f4-2821-46c4-97ab-6aa1dcd62fee {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.060637] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 743.060901] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 743.062062] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d27fa98-651f-431c-8b62-5601e50c73bc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.070327] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Waiting for the task: (returnval){ [ 743.070327] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521f7489-48a8-d62f-2a45-c95d44df8073" [ 743.070327] env[62522]: _type = "Task" [ 743.070327] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.078857] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521f7489-48a8-d62f-2a45-c95d44df8073, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.245936] env[62522]: DEBUG oslo_vmware.api [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415257, 'name': PowerOffVM_Task, 'duration_secs': 0.287016} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.246235] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 743.246427] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Updating instance '879354d3-7423-41e2-93f6-0d8d3a120170' progress to 17 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 743.384706] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Task: {'id': task-2415255, 'name': ReconfigVM_Task, 'duration_secs': 0.896926} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.385289] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Reconfigured VM instance instance-0000001e to attach disk [datastore1] 194c1dd8-3b0a-4c29-9779-65f1534121d1/194c1dd8-3b0a-4c29-9779-65f1534121d1.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 743.386274] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bf2a8133-c58c-44a7-89de-a830434c673a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.392924] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Waiting for the task: (returnval){ [ 743.392924] env[62522]: value = "task-2415258" [ 743.392924] env[62522]: _type = "Task" [ 743.392924] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.401732] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Task: {'id': task-2415258, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.465590] env[62522]: DEBUG oslo_vmware.api [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415256, 'name': ReconfigVM_Task, 'duration_secs': 0.687885} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.465882] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 743.466950] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-50761ea8-084b-4f5f-8033-1e2cf642eea2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.474325] env[62522]: DEBUG oslo_vmware.api [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 743.474325] env[62522]: value = "task-2415259" [ 743.474325] env[62522]: _type = "Task" [ 743.474325] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.482120] env[62522]: DEBUG oslo_vmware.api [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415259, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.512703] env[62522]: DEBUG nova.compute.manager [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 743.538741] env[62522]: DEBUG nova.virt.hardware [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 743.538939] env[62522]: DEBUG nova.virt.hardware [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 743.538971] env[62522]: DEBUG nova.virt.hardware [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 743.541452] env[62522]: DEBUG nova.virt.hardware [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 743.541452] env[62522]: DEBUG nova.virt.hardware [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 743.541452] env[62522]: DEBUG nova.virt.hardware [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 743.541452] env[62522]: DEBUG nova.virt.hardware [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 743.541452] env[62522]: DEBUG nova.virt.hardware [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 743.541680] env[62522]: DEBUG nova.virt.hardware [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 743.541680] env[62522]: DEBUG nova.virt.hardware [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 743.541680] env[62522]: DEBUG nova.virt.hardware [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 743.541680] env[62522]: DEBUG nova.scheduler.client.report [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 743.544829] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9813fdc6-c209-417c-b4e1-e82f110bc8be {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.558635] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a884c2-c4e8-4955-b3b6-a91ef9178389 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.583835] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521f7489-48a8-d62f-2a45-c95d44df8073, 'name': SearchDatastore_Task, 'duration_secs': 0.008483} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.585695] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-baeb8ac3-dc9e-43b3-8b01-518cffb8376b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.593408] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Waiting for the task: (returnval){ [ 743.593408] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bc3dab-9365-1e53-ab85-985ae88fcb4b" [ 743.593408] env[62522]: _type = "Task" [ 743.593408] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.602080] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bc3dab-9365-1e53-ab85-985ae88fcb4b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.756135] env[62522]: DEBUG nova.virt.hardware [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:04Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 743.756135] env[62522]: DEBUG nova.virt.hardware [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 743.756135] env[62522]: DEBUG nova.virt.hardware [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 743.756135] env[62522]: DEBUG nova.virt.hardware [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 743.756361] env[62522]: DEBUG nova.virt.hardware [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 743.756361] env[62522]: DEBUG nova.virt.hardware [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d 
tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 743.756361] env[62522]: DEBUG nova.virt.hardware [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 743.756361] env[62522]: DEBUG nova.virt.hardware [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 743.756789] env[62522]: DEBUG nova.virt.hardware [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 743.757147] env[62522]: DEBUG nova.virt.hardware [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 743.757870] env[62522]: DEBUG nova.virt.hardware [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 743.765971] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5da4ccfc-32ce-47f1-a0a2-4c0ad4353afc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.783027] env[62522]: DEBUG oslo_vmware.api [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 743.783027] env[62522]: value = "task-2415260" [ 743.783027] env[62522]: _type = "Task" [ 743.783027] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.790246] env[62522]: DEBUG oslo_vmware.api [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415260, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.841314] env[62522]: DEBUG nova.compute.manager [req-1b0a6755-246d-4b37-9500-8f4fb4b7fd9c req-a21a53d6-516f-446f-a587-4651aaf0a007 service nova] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Received event network-vif-plugged-e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 743.841314] env[62522]: DEBUG oslo_concurrency.lockutils [req-1b0a6755-246d-4b37-9500-8f4fb4b7fd9c req-a21a53d6-516f-446f-a587-4651aaf0a007 service nova] Acquiring lock "d266aff3-42b4-4dcb-b8ca-7c13cdf8d314-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.841314] env[62522]: DEBUG oslo_concurrency.lockutils [req-1b0a6755-246d-4b37-9500-8f4fb4b7fd9c req-a21a53d6-516f-446f-a587-4651aaf0a007 service nova] Lock "d266aff3-42b4-4dcb-b8ca-7c13cdf8d314-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.841591] env[62522]: DEBUG oslo_concurrency.lockutils [req-1b0a6755-246d-4b37-9500-8f4fb4b7fd9c req-a21a53d6-516f-446f-a587-4651aaf0a007 service nova] Lock "d266aff3-42b4-4dcb-b8ca-7c13cdf8d314-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.841647] env[62522]: DEBUG nova.compute.manager [req-1b0a6755-246d-4b37-9500-8f4fb4b7fd9c req-a21a53d6-516f-446f-a587-4651aaf0a007 service nova] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] No waiting events found dispatching network-vif-plugged-e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 743.841780] env[62522]: WARNING nova.compute.manager [req-1b0a6755-246d-4b37-9500-8f4fb4b7fd9c req-a21a53d6-516f-446f-a587-4651aaf0a007 service nova] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Received unexpected event network-vif-plugged-e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55 for instance with vm_state building and task_state spawning. [ 743.903032] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Task: {'id': task-2415258, 'name': Rename_Task, 'duration_secs': 0.150511} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.903298] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 743.903528] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd31ce96-eea9-4fc0-89b7-caf0f352f179 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.909479] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Waiting for the task: (returnval){ [ 743.909479] env[62522]: value = "task-2415261" [ 743.909479] env[62522]: _type = "Task" [ 743.909479] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.917995] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Task: {'id': task-2415261, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.944269] env[62522]: DEBUG nova.network.neutron [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Successfully updated port: e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 743.984383] env[62522]: DEBUG oslo_vmware.api [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415259, 'name': PowerOnVM_Task, 'duration_secs': 0.398065} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.984646] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 743.988364] env[62522]: DEBUG nova.compute.manager [None req-2eb5247e-3104-4d6f-9069-60a74ed477d7 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 743.989833] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae9d909-7914-4e82-b4a2-bb757914223f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.048862] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.573s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.049413] env[62522]: DEBUG nova.compute.manager [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 744.051952] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.721s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.054905] env[62522]: INFO nova.compute.claims [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 744.105155] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bc3dab-9365-1e53-ab85-985ae88fcb4b, 'name': SearchDatastore_Task, 'duration_secs': 0.009578} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.105424] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.105685] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 5b69254a-b34b-48ff-a96c-d8573c9abf3b/5b69254a-b34b-48ff-a96c-d8573c9abf3b.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 744.105948] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6536efb-76d3-450a-a6e4-ee95bce52241 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.112227] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Waiting for the task: (returnval){ [ 744.112227] env[62522]: value = "task-2415262" [ 744.112227] env[62522]: _type = "Task" [ 744.112227] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.120466] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Task: {'id': task-2415262, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.291946] env[62522]: DEBUG oslo_vmware.api [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415260, 'name': ReconfigVM_Task, 'duration_secs': 0.203682} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.292348] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Updating instance '879354d3-7423-41e2-93f6-0d8d3a120170' progress to 33 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 744.422627] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Task: {'id': task-2415261, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.447529] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Acquiring lock "refresh_cache-d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 744.447613] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Acquired lock "refresh_cache-d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.447761] env[62522]: DEBUG nova.network.neutron [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 744.561258] env[62522]: DEBUG nova.compute.utils [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 744.565812] env[62522]: DEBUG nova.compute.manager [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 744.565812] env[62522]: DEBUG nova.network.neutron [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 744.622961] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Task: {'id': task-2415262, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.624815] env[62522]: DEBUG nova.policy [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aef4d9ff8c95414a8c680ca612baa660', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '04ba6295b89743a184cc64343ac6bbaf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 744.799479] env[62522]: DEBUG nova.virt.hardware [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 744.799778] env[62522]: DEBUG nova.virt.hardware [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 744.799988] env[62522]: DEBUG nova.virt.hardware [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 744.800291] env[62522]: DEBUG nova.virt.hardware [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 744.800498] env[62522]: DEBUG nova.virt.hardware [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 744.800702] env[62522]: DEBUG nova.virt.hardware [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 744.801108] env[62522]: DEBUG nova.virt.hardware [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 744.801187] env[62522]: DEBUG nova.virt.hardware [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 744.801394] env[62522]: DEBUG nova.virt.hardware [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 744.801613] env[62522]: DEBUG nova.virt.hardware [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 744.801860] env[62522]: DEBUG nova.virt.hardware [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 744.807419] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Reconfiguring VM instance instance-00000014 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 744.807717] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fdf1bacf-6e04-4029-affb-0b0173c3a8bb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.826269] env[62522]: DEBUG oslo_vmware.api [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 744.826269] env[62522]: value = "task-2415263" [ 744.826269] env[62522]: _type = "Task" [ 744.826269] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.834833] env[62522]: DEBUG oslo_vmware.api [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415263, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.920235] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Task: {'id': task-2415261, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.921085] env[62522]: DEBUG nova.network.neutron [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Successfully created port: e44d8202-0840-41f3-a86d-8baffc8c19dd {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 744.986013] env[62522]: DEBUG nova.network.neutron [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 745.065918] env[62522]: DEBUG nova.compute.manager [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 745.122387] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Task: {'id': task-2415262, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.682416} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.124821] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 5b69254a-b34b-48ff-a96c-d8573c9abf3b/5b69254a-b34b-48ff-a96c-d8573c9abf3b.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 745.125098] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 745.125584] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f10ea9c8-0f85-47c8-abd8-4f9d39a3f184 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.134459] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Waiting for the task: (returnval){ [ 745.134459] env[62522]: value = "task-2415264" [ 745.134459] env[62522]: _type = "Task" [ 745.134459] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.144144] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Task: {'id': task-2415264, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.165736] env[62522]: DEBUG nova.network.neutron [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Updating instance_info_cache with network_info: [{"id": "e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55", "address": "fa:16:3e:94:f4:ed", "network": {"id": "eb0d2ded-859d-46b3-843d-bb580d0bfb6b", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-831675108-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dba307f1fbf48bfac98d9836a72254e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0fac98f-bf", "ovs_interfaceid": "e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.338562] env[62522]: DEBUG oslo_vmware.api [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415263, 'name': ReconfigVM_Task, 'duration_secs': 0.190677} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.338633] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Reconfigured VM instance instance-00000014 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 745.339467] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd60fe2-88f4-48f5-a7b9-2a1e019425e1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.363939] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Reconfiguring VM instance instance-00000014 to attach disk [datastore2] 879354d3-7423-41e2-93f6-0d8d3a120170/879354d3-7423-41e2-93f6-0d8d3a120170.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 745.364278] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03dd1fe4-0252-4865-9441-e01863c9d44d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.385654] env[62522]: DEBUG oslo_vmware.api [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 745.385654] env[62522]: value = "task-2415265" [ 745.385654] env[62522]: _type = "Task" [ 745.385654] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.396596] env[62522]: DEBUG oslo_vmware.api [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415265, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.422333] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Task: {'id': task-2415261, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.556944] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2007a01a-0553-459d-b9c3-afcb71201973 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.566347] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-846a5160-3bdf-4c06-b9d2-d7af84fd8e4b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.599039] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-038898e3-b27b-49f3-b139-4132f8cec090 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.606606] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f9f4fc5-8329-48a3-be50-16a50f23c23e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.620301] env[62522]: DEBUG nova.compute.provider_tree [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 745.651384] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Task: {'id': task-2415264, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069471} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.651661] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 745.653283] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9515a999-80e8-48c9-bda0-fb28fc0510fb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.668908] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Releasing lock "refresh_cache-d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 745.669245] env[62522]: DEBUG nova.compute.manager [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Instance network_info: |[{"id": "e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55", "address": "fa:16:3e:94:f4:ed", "network": {"id": "eb0d2ded-859d-46b3-843d-bb580d0bfb6b", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-831675108-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dba307f1fbf48bfac98d9836a72254e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0fac98f-bf", "ovs_interfaceid": "e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 745.679071] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] 5b69254a-b34b-48ff-a96c-d8573c9abf3b/5b69254a-b34b-48ff-a96c-d8573c9abf3b.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 745.679071] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:f4:ed', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6ab2e9f5-54fd-4cab-9405-ed65e2aaba64', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 745.685624] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Creating folder: Project (0dba307f1fbf48bfac98d9836a72254e). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 745.686208] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c0dcc8c6-c44b-468d-82e0-251f525f3af5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.700180] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b16074a0-e60e-4d18-8c6a-208d2a8b3b56 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.708661] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Waiting for the task: (returnval){ [ 745.708661] env[62522]: value = "task-2415267" [ 745.708661] env[62522]: _type = "Task" [ 745.708661] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.712468] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Created folder: Project (0dba307f1fbf48bfac98d9836a72254e) in parent group-v489562. [ 745.712658] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Creating folder: Instances. Parent ref: group-v489650. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 745.713185] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-35500d2e-b12f-4182-bf8e-d2b4f6a4ea46 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.717554] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Task: {'id': task-2415267, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.727428] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Created folder: Instances in parent group-v489650. [ 745.727660] env[62522]: DEBUG oslo.service.loopingcall [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 745.727847] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 745.728080] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4228309d-6bd1-4893-a104-6723c8fc8239 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.746502] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 745.746502] env[62522]: value = "task-2415269" [ 745.746502] env[62522]: _type = "Task" [ 745.746502] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.754537] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415269, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.897320] env[62522]: DEBUG oslo_vmware.api [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415265, 'name': ReconfigVM_Task, 'duration_secs': 0.284234} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.897610] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Reconfigured VM instance instance-00000014 to attach disk [datastore2] 879354d3-7423-41e2-93f6-0d8d3a120170/879354d3-7423-41e2-93f6-0d8d3a120170.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 745.897872] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Updating instance '879354d3-7423-41e2-93f6-0d8d3a120170' progress to 50 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 745.922561] env[62522]: DEBUG oslo_vmware.api [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Task: {'id': task-2415261, 'name': PowerOnVM_Task, 'duration_secs': 1.769823} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.922980] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 745.923062] env[62522]: INFO nova.compute.manager [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Took 10.54 seconds to spawn the instance on the hypervisor. 
[ 745.923319] env[62522]: DEBUG nova.compute.manager [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 745.924131] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c318e7d-5e70-43bc-8955-620849e2c743 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.029315] env[62522]: DEBUG nova.compute.manager [req-260c1788-1c74-4e9c-aa63-9dea617a29ae req-0b75d5bd-d4d1-4ab4-9f6a-991fdf2b2c93 service nova] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Received event network-changed-e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 746.029528] env[62522]: DEBUG nova.compute.manager [req-260c1788-1c74-4e9c-aa63-9dea617a29ae req-0b75d5bd-d4d1-4ab4-9f6a-991fdf2b2c93 service nova] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Refreshing instance network info cache due to event network-changed-e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 746.029764] env[62522]: DEBUG oslo_concurrency.lockutils [req-260c1788-1c74-4e9c-aa63-9dea617a29ae req-0b75d5bd-d4d1-4ab4-9f6a-991fdf2b2c93 service nova] Acquiring lock "refresh_cache-d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.029932] env[62522]: DEBUG oslo_concurrency.lockutils [req-260c1788-1c74-4e9c-aa63-9dea617a29ae req-0b75d5bd-d4d1-4ab4-9f6a-991fdf2b2c93 service nova] Acquired lock "refresh_cache-d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.031767] env[62522]: DEBUG nova.network.neutron [req-260c1788-1c74-4e9c-aa63-9dea617a29ae req-0b75d5bd-d4d1-4ab4-9f6a-991fdf2b2c93 service nova] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Refreshing network info cache for port e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 746.104329] env[62522]: DEBUG nova.compute.manager [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 746.126846] env[62522]: DEBUG nova.scheduler.client.report [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 746.141661] env[62522]: DEBUG nova.virt.hardware [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 746.142185] env[62522]: DEBUG nova.virt.hardware [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 746.142185] env[62522]: DEBUG nova.virt.hardware [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 746.142185] env[62522]: DEBUG nova.virt.hardware [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 746.142344] env[62522]: DEBUG nova.virt.hardware [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 746.142415] env[62522]: DEBUG nova.virt.hardware [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 746.142628] env[62522]: DEBUG nova.virt.hardware [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 746.142788] env[62522]: DEBUG nova.virt.hardware [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 746.142962] env[62522]: DEBUG nova.virt.hardware [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 746.143216] env[62522]: DEBUG nova.virt.hardware [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 746.143405] env[62522]: DEBUG nova.virt.hardware [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 746.144438] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ce58b8-bf89-42f1-9c92-04b453a8fbb4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.153640] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ac67a6e-1e21-4b6c-87b3-3056dd5053db {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.219214] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Task: {'id': task-2415267, 'name': ReconfigVM_Task, 'duration_secs': 0.3645} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.219499] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Reconfigured VM instance instance-0000001f to attach disk [datastore1] 5b69254a-b34b-48ff-a96c-d8573c9abf3b/5b69254a-b34b-48ff-a96c-d8573c9abf3b.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 746.220196] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7c14a7ef-5d71-42af-938b-90514a503426 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.227631] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Waiting for the task: (returnval){ [ 746.227631] env[62522]: value = "task-2415270" [ 746.227631] env[62522]: _type = "Task" [ 746.227631] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.237278] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Task: {'id': task-2415270, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.255096] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415269, 'name': CreateVM_Task, 'duration_secs': 0.428895} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.255311] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 746.255956] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.256152] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.256443] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 746.256676] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2dc0ec1-2506-4066-9c77-4a4eaa613010 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.261533] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Waiting for the task: (returnval){ [ 746.261533] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5234ddd6-66c3-124c-93ea-5e86212d2dc9" [ 746.261533] env[62522]: _type = "Task" [ 746.261533] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.269838] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5234ddd6-66c3-124c-93ea-5e86212d2dc9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.270227] env[62522]: INFO nova.compute.manager [None req-775a5aa3-df4f-4ddc-a8d7-e533f256233c tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Unrescuing [ 746.270439] env[62522]: DEBUG oslo_concurrency.lockutils [None req-775a5aa3-df4f-4ddc-a8d7-e533f256233c tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "refresh_cache-c181ce48-9fe2-4400-9047-f8b5a7159dd3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.270586] env[62522]: DEBUG oslo_concurrency.lockutils [None req-775a5aa3-df4f-4ddc-a8d7-e533f256233c tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquired lock "refresh_cache-c181ce48-9fe2-4400-9047-f8b5a7159dd3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.270751] env[62522]: DEBUG nova.network.neutron [None req-775a5aa3-df4f-4ddc-a8d7-e533f256233c tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 746.406413] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f68bad0f-3b25-498d-b2c7-df3334080a7f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.426142] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-668b0c09-6628-4886-abf8-1b68530f6b42 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.450820] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Updating instance '879354d3-7423-41e2-93f6-0d8d3a120170' progress to 67 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 746.452977] env[62522]: INFO nova.compute.manager [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Took 47.15 seconds to build instance. 
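[Annotation, not part of the captured log] The "Acquiring/Acquired/Releasing lock" records above and below come from oslo.concurrency lockutils guarding names such as "compute_resources" and "refresh_cache-<uuid>". A minimal sketch of that usage, with the protected work stubbed out (the lock names mirror the log; nothing else here is from it):

# Sketch of the oslo.concurrency locking pattern reflected in the log.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources', 'nova-')
def claim_resources():
    # Runs with the per-process "compute_resources" lock held, so
    # concurrent claims are serialized, producing the waited/held
    # timings seen in the records.
    pass


def refresh_network_cache(instance_uuid):
    # Context-manager form, matching the refresh_cache-<uuid> locks above.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass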
[ 746.599862] env[62522]: DEBUG nova.network.neutron [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Successfully updated port: e44d8202-0840-41f3-a86d-8baffc8c19dd {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 746.635712] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.584s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.636325] env[62522]: DEBUG nova.compute.manager [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 746.640415] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.248s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.640657] env[62522]: DEBUG nova.objects.instance [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Lazy-loading 'resources' on Instance uuid 253a2903-2601-4f0a-8882-e7510406f9d5 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 746.738438] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Task: {'id': task-2415270, 'name': Rename_Task, 'duration_secs': 0.149569} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.738715] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 746.738989] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a86610e-e9ab-4fd5-ab57-1832b04adb0e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.745387] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Waiting for the task: (returnval){ [ 746.745387] env[62522]: value = "task-2415271" [ 746.745387] env[62522]: _type = "Task" [ 746.745387] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.755992] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Task: {'id': task-2415271, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.773040] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5234ddd6-66c3-124c-93ea-5e86212d2dc9, 'name': SearchDatastore_Task, 'duration_secs': 0.009985} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.773040] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.773040] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 746.773040] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.773438] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.773438] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 746.773438] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd633100-7dd3-481e-98fc-a21bc656fb13 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.781189] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Created directory 
with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 746.781504] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 746.782400] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8d4184e-410f-419c-8ff2-6bf9f9d7ffd8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.789470] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Waiting for the task: (returnval){ [ 746.789470] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526998c4-6af8-4a29-9679-c662b30e3c40" [ 746.789470] env[62522]: _type = "Task" [ 746.789470] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.798403] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526998c4-6af8-4a29-9679-c662b30e3c40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.801666] env[62522]: DEBUG nova.network.neutron [req-260c1788-1c74-4e9c-aa63-9dea617a29ae req-0b75d5bd-d4d1-4ab4-9f6a-991fdf2b2c93 service nova] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Updated VIF entry in instance network info cache for port e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 746.802063] env[62522]: DEBUG nova.network.neutron [req-260c1788-1c74-4e9c-aa63-9dea617a29ae req-0b75d5bd-d4d1-4ab4-9f6a-991fdf2b2c93 service nova] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Updating instance_info_cache with network_info: [{"id": "e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55", "address": "fa:16:3e:94:f4:ed", "network": {"id": "eb0d2ded-859d-46b3-843d-bb580d0bfb6b", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-831675108-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dba307f1fbf48bfac98d9836a72254e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0fac98f-bf", "ovs_interfaceid": "e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.863903] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c187d20f-d0c4-4b66-9598-0e8dbbbdbe17 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Acquiring lock "interface-194c1dd8-3b0a-4c29-9779-65f1534121d1-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 746.864350] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c187d20f-d0c4-4b66-9598-0e8dbbbdbe17 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Lock "interface-194c1dd8-3b0a-4c29-9779-65f1534121d1-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.864504] env[62522]: DEBUG nova.objects.instance [None req-c187d20f-d0c4-4b66-9598-0e8dbbbdbe17 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Lazy-loading 'flavor' on Instance uuid 194c1dd8-3b0a-4c29-9779-65f1534121d1 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 746.958715] env[62522]: DEBUG oslo_concurrency.lockutils [None req-20640067-7198-4b56-beef-6ef51842ae36 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Lock "194c1dd8-3b0a-4c29-9779-65f1534121d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.022s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.994185] env[62522]: DEBUG nova.network.neutron [None req-775a5aa3-df4f-4ddc-a8d7-e533f256233c tempest-ServerRescueNegativeTestJSON-454526832 
tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Updating instance_info_cache with network_info: [{"id": "2e33c70f-036d-459c-a393-f570cbf7089c", "address": "fa:16:3e:58:55:4e", "network": {"id": "5f1d73d1-ff9e-4081-87cf-8df6294f67c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-892212702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "962664c996f24cf9ae192f79fae18ca4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "419a5b3f-4c6f-4168-9def-746b4d8c5c24", "external-id": "nsx-vlan-transportzone-656", "segmentation_id": 656, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e33c70f-03", "ovs_interfaceid": "2e33c70f-036d-459c-a393-f570cbf7089c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.004032] env[62522]: DEBUG nova.network.neutron [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Port cd619060-5655-434c-967f-7552adca021b binding to destination host cpu-1 is already ACTIVE {{(pid=62522) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 747.104577] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquiring lock "refresh_cache-c1fd078c-61d4-4c0f-8c49-0f56a926a087" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 747.104577] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquired lock "refresh_cache-c1fd078c-61d4-4c0f-8c49-0f56a926a087" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.104577] env[62522]: DEBUG nova.network.neutron [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 747.147932] env[62522]: DEBUG nova.compute.utils [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 747.149659] env[62522]: DEBUG nova.compute.manager [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] 
[instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 747.149831] env[62522]: DEBUG nova.network.neutron [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 747.199301] env[62522]: DEBUG nova.policy [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8eff9205ccb14bc89c4b1be13efd4a24', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce263d67988b4448b181b122b9270155', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 747.257379] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Task: {'id': task-2415271, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.302401] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526998c4-6af8-4a29-9679-c662b30e3c40, 'name': SearchDatastore_Task, 'duration_secs': 0.009478} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.305825] env[62522]: DEBUG oslo_concurrency.lockutils [req-260c1788-1c74-4e9c-aa63-9dea617a29ae req-0b75d5bd-d4d1-4ab4-9f6a-991fdf2b2c93 service nova] Releasing lock "refresh_cache-d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 747.306496] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdc28cb9-e314-4cdf-b6d4-e9a395504eaf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.312412] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Waiting for the task: (returnval){ [ 747.312412] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d64a4a-c51f-4401-428f-5a1979ee2e14" [ 747.312412] env[62522]: _type = "Task" [ 747.312412] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.321799] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d64a4a-c51f-4401-428f-5a1979ee2e14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.369092] env[62522]: DEBUG nova.objects.instance [None req-c187d20f-d0c4-4b66-9598-0e8dbbbdbe17 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Lazy-loading 'pci_requests' on Instance uuid 194c1dd8-3b0a-4c29-9779-65f1534121d1 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 747.462064] env[62522]: DEBUG nova.compute.manager [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 747.498020] env[62522]: DEBUG oslo_concurrency.lockutils [None req-775a5aa3-df4f-4ddc-a8d7-e533f256233c tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Releasing lock "refresh_cache-c181ce48-9fe2-4400-9047-f8b5a7159dd3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 747.498020] env[62522]: DEBUG nova.objects.instance [None req-775a5aa3-df4f-4ddc-a8d7-e533f256233c tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lazy-loading 'flavor' on Instance uuid c181ce48-9fe2-4400-9047-f8b5a7159dd3 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 747.639546] env[62522]: DEBUG nova.network.neutron [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 747.653148] env[62522]: DEBUG nova.compute.manager [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 747.665645] env[62522]: DEBUG nova.network.neutron [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Successfully created port: 648bb769-184a-43cb-a66e-9b36814e4e4a {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 747.762213] env[62522]: DEBUG oslo_vmware.api [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Task: {'id': task-2415271, 'name': PowerOnVM_Task, 'duration_secs': 0.658051} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.764830] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 747.765062] env[62522]: INFO nova.compute.manager [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Took 9.67 seconds to spawn the instance on the hypervisor. [ 747.765253] env[62522]: DEBUG nova.compute.manager [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 747.766336] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d94d4c5-8cc5-4975-8ee6-faab93a56851 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.797636] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8877f46d-f430-4b08-aa50-bd80a6a873ce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.804700] env[62522]: DEBUG nova.network.neutron [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Updating instance_info_cache with network_info: [{"id": "e44d8202-0840-41f3-a86d-8baffc8c19dd", "address": "fa:16:3e:bc:f2:43", "network": {"id": "896c53ad-3b58-4e2c-89d9-7fa723dc8e79", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-558196866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "04ba6295b89743a184cc64343ac6bbaf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape44d8202-08", "ovs_interfaceid": "e44d8202-0840-41f3-a86d-8baffc8c19dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.806695] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-045c5227-6234-4c16-9bb0-4acf75b7f913 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.841190] env[62522]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db43308-d247-40e2-ba64-e4aabb2cc624 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.849231] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d64a4a-c51f-4401-428f-5a1979ee2e14, 'name': SearchDatastore_Task, 'duration_secs': 0.010263} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.851512] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 747.851777] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] d266aff3-42b4-4dcb-b8ca-7c13cdf8d314/d266aff3-42b4-4dcb-b8ca-7c13cdf8d314.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 747.852093] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9cd544e-5350-44c6-ac3a-dd71cfff3029 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.854736] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4b34bfa-417b-47ad-b0e9-4d3e9347e479 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.864170] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Waiting for the task: (returnval){ [ 747.864170] env[62522]: value = "task-2415272" [ 747.864170] env[62522]: _type = "Task" [ 747.864170] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.872453] env[62522]: DEBUG nova.objects.base [None req-c187d20f-d0c4-4b66-9598-0e8dbbbdbe17 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Object Instance<194c1dd8-3b0a-4c29-9779-65f1534121d1> lazy-loaded attributes: flavor,pci_requests {{(pid=62522) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 747.872650] env[62522]: DEBUG nova.network.neutron [None req-c187d20f-d0c4-4b66-9598-0e8dbbbdbe17 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 747.874556] env[62522]: DEBUG nova.compute.provider_tree [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 747.885980] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': task-2415272, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.968960] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c187d20f-d0c4-4b66-9598-0e8dbbbdbe17 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Lock "interface-194c1dd8-3b0a-4c29-9779-65f1534121d1-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.105s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.986543] env[62522]: DEBUG oslo_concurrency.lockutils [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.007824] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2758fe91-c679-424d-8828-60d44d17f172 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.040776] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-775a5aa3-df4f-4ddc-a8d7-e533f256233c tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 748.043342] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4d50b75b-1ff6-4da4-8b0d-b50a64cce179 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.050579] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 
tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "879354d3-7423-41e2-93f6-0d8d3a120170-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.050631] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "879354d3-7423-41e2-93f6-0d8d3a120170-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.051105] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "879354d3-7423-41e2-93f6-0d8d3a120170-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.057542] env[62522]: DEBUG oslo_vmware.api [None req-775a5aa3-df4f-4ddc-a8d7-e533f256233c tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 748.057542] env[62522]: value = "task-2415273" [ 748.057542] env[62522]: _type = "Task" [ 748.057542] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.067994] env[62522]: DEBUG oslo_vmware.api [None req-775a5aa3-df4f-4ddc-a8d7-e533f256233c tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415273, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.289671] env[62522]: INFO nova.compute.manager [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Took 48.31 seconds to build instance. 
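[Annotation, not part of the captured log] The "Instance VIF info" records logged by build_virtual_machine (one follows below) are derived from the Neutron network_info entries dumped earlier: the opaque network id comes from the port's nsx-logical-switch-id detail, the bridge name and MAC come straight from the VIF entry. A simplified stand-in for that mapping (not the actual nova.virt.vmwareapi code):

# Illustrative mapping from a Neutron VIF dict (as in the
# instance_info_cache dumps above) to the vmwareapi VIF-info dict.
def vif_info_from_neutron_vif(vif):
    details = vif.get('details', {})
    return {
        'network_name': vif['network']['bridge'],      # e.g. 'br-int'
        'mac_address': vif['address'],
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': details.get('nsx-logical-switch-id'),
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': vif['id'],
        'vif_model': 'vmxnet3',                         # from image metadata
    }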
[ 748.311163] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Releasing lock "refresh_cache-c1fd078c-61d4-4c0f-8c49-0f56a926a087" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.311629] env[62522]: DEBUG nova.compute.manager [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Instance network_info: |[{"id": "e44d8202-0840-41f3-a86d-8baffc8c19dd", "address": "fa:16:3e:bc:f2:43", "network": {"id": "896c53ad-3b58-4e2c-89d9-7fa723dc8e79", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-558196866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "04ba6295b89743a184cc64343ac6bbaf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape44d8202-08", "ovs_interfaceid": "e44d8202-0840-41f3-a86d-8baffc8c19dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 748.312390] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:f2:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '305ccd93-08cb-4658-845c-d9b64952daf7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e44d8202-0840-41f3-a86d-8baffc8c19dd', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 748.320813] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Creating folder: Project (04ba6295b89743a184cc64343ac6bbaf). Parent ref: group-v489562. 
{{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 748.321559] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b410f983-47d8-4278-8298-ae49bf0ef09e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.332205] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Created folder: Project (04ba6295b89743a184cc64343ac6bbaf) in parent group-v489562. [ 748.332405] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Creating folder: Instances. Parent ref: group-v489653. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 748.332660] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c6b700ea-2ec7-4af4-8610-c0700daefc8b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.342648] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Created folder: Instances in parent group-v489653. [ 748.342930] env[62522]: DEBUG oslo.service.loopingcall [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 748.343151] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 748.343368] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4d448b8f-acba-4b7b-bc7b-f5e8f059a33d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.365115] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 748.365115] env[62522]: value = "task-2415276" [ 748.365115] env[62522]: _type = "Task" [ 748.365115] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.374546] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415276, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.380820] env[62522]: DEBUG nova.scheduler.client.report [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 748.397177] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': task-2415272, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.571248] env[62522]: DEBUG oslo_vmware.api [None req-775a5aa3-df4f-4ddc-a8d7-e533f256233c tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415273, 'name': PowerOffVM_Task, 'duration_secs': 0.44851} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.571539] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-775a5aa3-df4f-4ddc-a8d7-e533f256233c tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 748.576871] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-775a5aa3-df4f-4ddc-a8d7-e533f256233c tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Reconfiguring VM instance instance-00000016 to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 748.577479] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf9e3484-ec24-458f-ac22-77b7d9d9709c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.596437] env[62522]: DEBUG oslo_vmware.api [None req-775a5aa3-df4f-4ddc-a8d7-e533f256233c tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 748.596437] env[62522]: value = "task-2415277" [ 748.596437] env[62522]: _type = "Task" [ 748.596437] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.605060] env[62522]: DEBUG oslo_vmware.api [None req-775a5aa3-df4f-4ddc-a8d7-e533f256233c tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415277, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.646847] env[62522]: DEBUG nova.compute.manager [req-8c6f8c7e-2e3b-4cf1-b1ce-9c60e74b8b83 req-073c77c8-1b9a-4ede-96bc-4c5aa158bd0a service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Received event network-vif-plugged-e44d8202-0840-41f3-a86d-8baffc8c19dd {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 748.647107] env[62522]: DEBUG oslo_concurrency.lockutils [req-8c6f8c7e-2e3b-4cf1-b1ce-9c60e74b8b83 req-073c77c8-1b9a-4ede-96bc-4c5aa158bd0a service nova] Acquiring lock "c1fd078c-61d4-4c0f-8c49-0f56a926a087-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.647326] env[62522]: DEBUG oslo_concurrency.lockutils [req-8c6f8c7e-2e3b-4cf1-b1ce-9c60e74b8b83 req-073c77c8-1b9a-4ede-96bc-4c5aa158bd0a service nova] Lock "c1fd078c-61d4-4c0f-8c49-0f56a926a087-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.647496] env[62522]: DEBUG oslo_concurrency.lockutils [req-8c6f8c7e-2e3b-4cf1-b1ce-9c60e74b8b83 req-073c77c8-1b9a-4ede-96bc-4c5aa158bd0a service nova] Lock "c1fd078c-61d4-4c0f-8c49-0f56a926a087-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.647667] env[62522]: DEBUG nova.compute.manager [req-8c6f8c7e-2e3b-4cf1-b1ce-9c60e74b8b83 req-073c77c8-1b9a-4ede-96bc-4c5aa158bd0a service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] No waiting events found dispatching network-vif-plugged-e44d8202-0840-41f3-a86d-8baffc8c19dd {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 748.647832] env[62522]: WARNING nova.compute.manager [req-8c6f8c7e-2e3b-4cf1-b1ce-9c60e74b8b83 req-073c77c8-1b9a-4ede-96bc-4c5aa158bd0a service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Received unexpected event network-vif-plugged-e44d8202-0840-41f3-a86d-8baffc8c19dd for instance with vm_state building and task_state spawning. [ 748.647993] env[62522]: DEBUG nova.compute.manager [req-8c6f8c7e-2e3b-4cf1-b1ce-9c60e74b8b83 req-073c77c8-1b9a-4ede-96bc-4c5aa158bd0a service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Received event network-changed-e44d8202-0840-41f3-a86d-8baffc8c19dd {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 748.648180] env[62522]: DEBUG nova.compute.manager [req-8c6f8c7e-2e3b-4cf1-b1ce-9c60e74b8b83 req-073c77c8-1b9a-4ede-96bc-4c5aa158bd0a service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Refreshing instance network info cache due to event network-changed-e44d8202-0840-41f3-a86d-8baffc8c19dd. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 748.648331] env[62522]: DEBUG oslo_concurrency.lockutils [req-8c6f8c7e-2e3b-4cf1-b1ce-9c60e74b8b83 req-073c77c8-1b9a-4ede-96bc-4c5aa158bd0a service nova] Acquiring lock "refresh_cache-c1fd078c-61d4-4c0f-8c49-0f56a926a087" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.648465] env[62522]: DEBUG oslo_concurrency.lockutils [req-8c6f8c7e-2e3b-4cf1-b1ce-9c60e74b8b83 req-073c77c8-1b9a-4ede-96bc-4c5aa158bd0a service nova] Acquired lock "refresh_cache-c1fd078c-61d4-4c0f-8c49-0f56a926a087" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.648619] env[62522]: DEBUG nova.network.neutron [req-8c6f8c7e-2e3b-4cf1-b1ce-9c60e74b8b83 req-073c77c8-1b9a-4ede-96bc-4c5aa158bd0a service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Refreshing network info cache for port e44d8202-0840-41f3-a86d-8baffc8c19dd {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 748.676796] env[62522]: DEBUG nova.compute.manager [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 748.702455] env[62522]: DEBUG nova.virt.hardware [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 748.702708] env[62522]: DEBUG nova.virt.hardware [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 748.702866] env[62522]: DEBUG nova.virt.hardware [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 748.703102] env[62522]: DEBUG nova.virt.hardware [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 748.703255] env[62522]: DEBUG nova.virt.hardware [None 
req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 748.703403] env[62522]: DEBUG nova.virt.hardware [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 748.703610] env[62522]: DEBUG nova.virt.hardware [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 748.703770] env[62522]: DEBUG nova.virt.hardware [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 748.703939] env[62522]: DEBUG nova.virt.hardware [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 748.704126] env[62522]: DEBUG nova.virt.hardware [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 748.704304] env[62522]: DEBUG nova.virt.hardware [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 748.705175] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46419d7d-3745-4017-9f48-167f1f718690 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.713311] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baabaa67-a0de-4b50-a6a3-64214707624a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.791637] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e5b020a8-886e-4c31-b2cb-bbc68f6767db tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Lock "5b69254a-b34b-48ff-a96c-d8573c9abf3b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.121s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.875629] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415276, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.884416] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': task-2415272, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.588743} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.884740] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] d266aff3-42b4-4dcb-b8ca-7c13cdf8d314/d266aff3-42b4-4dcb-b8ca-7c13cdf8d314.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 748.885049] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 748.885388] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cca1ddf0-8454-4cdb-acab-98c800becdd5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.888137] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.248s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.890489] env[62522]: DEBUG oslo_concurrency.lockutils [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.176s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.892661] env[62522]: INFO nova.compute.claims [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 748.901604] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Waiting for the task: (returnval){ [ 748.901604] env[62522]: value = "task-2415278" [ 748.901604] env[62522]: _type = "Task" [ 748.901604] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.914566] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': task-2415278, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.917656] env[62522]: INFO nova.scheduler.client.report [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Deleted allocations for instance 253a2903-2601-4f0a-8882-e7510406f9d5 [ 749.106819] env[62522]: DEBUG oslo_vmware.api [None req-775a5aa3-df4f-4ddc-a8d7-e533f256233c tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415277, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.150536] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "refresh_cache-879354d3-7423-41e2-93f6-0d8d3a120170" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.150786] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquired lock "refresh_cache-879354d3-7423-41e2-93f6-0d8d3a120170" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.150991] env[62522]: DEBUG nova.network.neutron [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 749.294800] env[62522]: DEBUG nova.compute.manager [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 749.357231] env[62522]: DEBUG nova.network.neutron [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Successfully updated port: 648bb769-184a-43cb-a66e-9b36814e4e4a {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 749.380358] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415276, 'name': CreateVM_Task, 'duration_secs': 0.678597} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.380543] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 749.381218] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.381383] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.381699] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 749.381953] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e3d5bba-9e97-486f-a726-71fc3b106874 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.386799] env[62522]: DEBUG oslo_vmware.api [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Waiting for the task: (returnval){ [ 749.386799] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bdd445-309b-45d8-5d48-28f34ed35843" [ 749.386799] env[62522]: _type = "Task" [ 749.386799] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.395251] env[62522]: DEBUG oslo_vmware.api [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bdd445-309b-45d8-5d48-28f34ed35843, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.409962] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': task-2415278, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.269954} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.410257] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 749.411041] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc9d64e-c48c-4dce-8ad5-8dc7257d40d8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.436538] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Reconfiguring VM instance instance-00000020 to attach disk [datastore2] d266aff3-42b4-4dcb-b8ca-7c13cdf8d314/d266aff3-42b4-4dcb-b8ca-7c13cdf8d314.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 749.437155] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fd8dfc35-2e62-45f1-be41-71cc2db36aa6 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404 tempest-FloatingIPsAssociationNegativeTestJSON-1640952404-project-member] Lock "253a2903-2601-4f0a-8882-e7510406f9d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.780s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.441296] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d99d5636-9273-4afd-8cf9-1568296df657 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.467097] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Waiting for the task: (returnval){ [ 749.467097] env[62522]: value = "task-2415279" [ 749.467097] env[62522]: _type = "Task" [ 749.467097] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.476946] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': task-2415279, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.517662] env[62522]: DEBUG nova.network.neutron [req-8c6f8c7e-2e3b-4cf1-b1ce-9c60e74b8b83 req-073c77c8-1b9a-4ede-96bc-4c5aa158bd0a service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Updated VIF entry in instance network info cache for port e44d8202-0840-41f3-a86d-8baffc8c19dd. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 749.518055] env[62522]: DEBUG nova.network.neutron [req-8c6f8c7e-2e3b-4cf1-b1ce-9c60e74b8b83 req-073c77c8-1b9a-4ede-96bc-4c5aa158bd0a service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Updating instance_info_cache with network_info: [{"id": "e44d8202-0840-41f3-a86d-8baffc8c19dd", "address": "fa:16:3e:bc:f2:43", "network": {"id": "896c53ad-3b58-4e2c-89d9-7fa723dc8e79", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-558196866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "04ba6295b89743a184cc64343ac6bbaf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape44d8202-08", "ovs_interfaceid": "e44d8202-0840-41f3-a86d-8baffc8c19dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.609907] env[62522]: DEBUG oslo_vmware.api [None req-775a5aa3-df4f-4ddc-a8d7-e533f256233c tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415277, 'name': ReconfigVM_Task, 'duration_secs': 0.742103} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.610235] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-775a5aa3-df4f-4ddc-a8d7-e533f256233c tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Reconfigured VM instance instance-00000016 to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 749.610421] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-775a5aa3-df4f-4ddc-a8d7-e533f256233c tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 749.610665] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4df6ec01-f403-459f-ba67-85f44720884a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.616624] env[62522]: DEBUG oslo_vmware.api [None req-775a5aa3-df4f-4ddc-a8d7-e533f256233c tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 749.616624] env[62522]: value = "task-2415280" [ 749.616624] env[62522]: _type = "Task" [ 749.616624] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.624801] env[62522]: DEBUG oslo_vmware.api [None req-775a5aa3-df4f-4ddc-a8d7-e533f256233c tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415280, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.640158] env[62522]: DEBUG nova.compute.manager [req-d73d65ba-1d61-4cc9-ac68-1113253f71cf req-f561d737-77e5-42bd-8089-6756bffaa40f service nova] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Received event network-vif-plugged-648bb769-184a-43cb-a66e-9b36814e4e4a {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 749.640158] env[62522]: DEBUG oslo_concurrency.lockutils [req-d73d65ba-1d61-4cc9-ac68-1113253f71cf req-f561d737-77e5-42bd-8089-6756bffaa40f service nova] Acquiring lock "e813e7da-fd2c-4f10-b2f3-1e2b5c153a19-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.640158] env[62522]: DEBUG oslo_concurrency.lockutils [req-d73d65ba-1d61-4cc9-ac68-1113253f71cf req-f561d737-77e5-42bd-8089-6756bffaa40f service nova] Lock "e813e7da-fd2c-4f10-b2f3-1e2b5c153a19-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.640158] env[62522]: DEBUG oslo_concurrency.lockutils [req-d73d65ba-1d61-4cc9-ac68-1113253f71cf req-f561d737-77e5-42bd-8089-6756bffaa40f service nova] Lock "e813e7da-fd2c-4f10-b2f3-1e2b5c153a19-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.640445] env[62522]: DEBUG nova.compute.manager [req-d73d65ba-1d61-4cc9-ac68-1113253f71cf req-f561d737-77e5-42bd-8089-6756bffaa40f service nova] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] No waiting events found dispatching network-vif-plugged-648bb769-184a-43cb-a66e-9b36814e4e4a {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 749.640445] env[62522]: WARNING nova.compute.manager [req-d73d65ba-1d61-4cc9-ac68-1113253f71cf req-f561d737-77e5-42bd-8089-6756bffaa40f service nova] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Received unexpected event network-vif-plugged-648bb769-184a-43cb-a66e-9b36814e4e4a for instance with vm_state building and task_state spawning. 
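The entries above and below repeatedly trace the same pattern: a vSphere task is invoked (CreateVM_Task, ReconfigVM_Task, ExtendVirtualDisk_Task, PowerOnVM_Task, CopyVirtualDisk_Task), the API wrapper logs the returned task handle (value = "task-24152.."), and _poll_task then reports progress percentages until the task "completed successfully". As a rough, self-contained sketch of that poll-until-complete pattern only (this is not the oslo.vmware implementation; FakeTask and poll_task are hypothetical names used purely for illustration):

import time

class FakeTask:
    """Hypothetical stand-in for a vSphere task handle (value = "task-...")."""

    def __init__(self, task_id, steps=4):
        self.task_id = task_id
        self._progress = 0
        self._step = max(1, 100 // steps)

    def poll(self):
        # A real client would read TaskInfo.state / TaskInfo.progress via the
        # vSphere PropertyCollector; here we simply advance a counter.
        self._progress = min(100, self._progress + self._step)
        state = "success" if self._progress >= 100 else "running"
        return state, self._progress


def poll_task(task, interval=0.5, timeout=60.0):
    """Poll a task until it finishes, mirroring the progress lines in this log."""
    start = time.monotonic()
    while True:
        state, progress = task.poll()
        print("Task: {'id': %r} progress is %d%%." % (task.task_id, progress))
        if state == "success":
            print("Task: {'id': %r} completed successfully." % task.task_id)
            return
        if time.monotonic() - start > timeout:
            raise TimeoutError("%s did not complete within %ss" % (task.task_id, timeout))
        time.sleep(interval)


if __name__ == "__main__":
    poll_task(FakeTask("task-2415276"), interval=0.1)

In the log itself the same wrapper work is additionally serialized with oslo_concurrency.lockutils locks (for example the "refresh_cache-..." and "compute_resources" locks), which is why each task invocation is bracketed by acquire/release entries.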
[ 749.820359] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.860457] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquiring lock "refresh_cache-e813e7da-fd2c-4f10-b2f3-1e2b5c153a19" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.860597] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquired lock "refresh_cache-e813e7da-fd2c-4f10-b2f3-1e2b5c153a19" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.860827] env[62522]: DEBUG nova.network.neutron [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 749.911160] env[62522]: DEBUG oslo_vmware.api [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bdd445-309b-45d8-5d48-28f34ed35843, 'name': SearchDatastore_Task, 'duration_secs': 0.011326} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.911577] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.911833] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 749.912068] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.912211] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.912411] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 749.912697] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12ffc7ca-bec4-4e91-ab51-55a0fd928d1d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.922493] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 749.922715] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 749.923548] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b621c61c-6677-4d01-b86c-cae56de1ce75 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.933713] env[62522]: DEBUG oslo_vmware.api [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Waiting for the task: (returnval){ [ 749.933713] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5259183b-5b20-f5b0-d903-141a99d71e46" [ 749.933713] env[62522]: _type = "Task" [ 749.933713] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.949611] env[62522]: DEBUG oslo_vmware.api [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5259183b-5b20-f5b0-d903-141a99d71e46, 'name': SearchDatastore_Task, 'duration_secs': 0.010136} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.950475] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ebdc57b-eef1-40a4-9a59-b1f778037f0f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.961755] env[62522]: DEBUG oslo_vmware.api [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Waiting for the task: (returnval){ [ 749.961755] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52458d71-fd46-9b0c-5425-23854b797899" [ 749.961755] env[62522]: _type = "Task" [ 749.961755] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.968367] env[62522]: DEBUG oslo_vmware.api [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52458d71-fd46-9b0c-5425-23854b797899, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.981113] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': task-2415279, 'name': ReconfigVM_Task, 'duration_secs': 0.49382} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.981426] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Reconfigured VM instance instance-00000020 to attach disk [datastore2] d266aff3-42b4-4dcb-b8ca-7c13cdf8d314/d266aff3-42b4-4dcb-b8ca-7c13cdf8d314.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 749.982021] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b62f4713-8a46-4031-941e-7c10118f6e9b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.989673] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Waiting for the task: (returnval){ [ 749.989673] env[62522]: value = "task-2415281" [ 749.989673] env[62522]: _type = "Task" [ 749.989673] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.998924] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': task-2415281, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.021021] env[62522]: DEBUG oslo_concurrency.lockutils [req-8c6f8c7e-2e3b-4cf1-b1ce-9c60e74b8b83 req-073c77c8-1b9a-4ede-96bc-4c5aa158bd0a service nova] Releasing lock "refresh_cache-c1fd078c-61d4-4c0f-8c49-0f56a926a087" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.035166] env[62522]: DEBUG nova.network.neutron [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Updating instance_info_cache with network_info: [{"id": "cd619060-5655-434c-967f-7552adca021b", "address": "fa:16:3e:7d:62:dc", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd619060-56", "ovs_interfaceid": "cd619060-5655-434c-967f-7552adca021b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.128978] env[62522]: DEBUG oslo_vmware.api [None req-775a5aa3-df4f-4ddc-a8d7-e533f256233c tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415280, 'name': PowerOnVM_Task, 'duration_secs': 0.405347} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.132247] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-775a5aa3-df4f-4ddc-a8d7-e533f256233c tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 750.132509] env[62522]: DEBUG nova.compute.manager [None req-775a5aa3-df4f-4ddc-a8d7-e533f256233c tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 750.134384] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a054be-dc94-409f-8e11-bcd1bd71eb23 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.262607] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Acquiring lock "194c1dd8-3b0a-4c29-9779-65f1534121d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.263217] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Lock "194c1dd8-3b0a-4c29-9779-65f1534121d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.263454] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Acquiring lock "194c1dd8-3b0a-4c29-9779-65f1534121d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.263929] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Lock "194c1dd8-3b0a-4c29-9779-65f1534121d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.263929] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Lock 
"194c1dd8-3b0a-4c29-9779-65f1534121d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.267443] env[62522]: INFO nova.compute.manager [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Terminating instance [ 750.425951] env[62522]: DEBUG nova.network.neutron [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.452900] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32ae378e-9c9d-4492-b70c-fb2aa40375b6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.464641] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37540164-0629-46c6-a5a7-53eb31b44aa7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.474281] env[62522]: DEBUG oslo_vmware.api [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52458d71-fd46-9b0c-5425-23854b797899, 'name': SearchDatastore_Task, 'duration_secs': 0.011301} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.504187] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.504545] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] c1fd078c-61d4-4c0f-8c49-0f56a926a087/c1fd078c-61d4-4c0f-8c49-0f56a926a087.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 750.507623] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-506ed17c-22bb-4beb-84ba-587bb325be6b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.513076] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13f183d8-6854-4009-974d-94e726791adf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.523996] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-309da5f9-2073-43c2-a30d-8bf82d9f2dd3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.528097] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': task-2415281, 'name': Rename_Task, 'duration_secs': 0.212994} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.529469] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 750.529941] env[62522]: DEBUG oslo_vmware.api [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Waiting for the task: (returnval){ [ 750.529941] env[62522]: value = "task-2415282" [ 750.529941] env[62522]: _type = "Task" [ 750.529941] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.530652] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b643c0a3-1079-4749-bee2-c6aebfecc670 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.547442] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Releasing lock "refresh_cache-879354d3-7423-41e2-93f6-0d8d3a120170" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.551730] env[62522]: DEBUG nova.compute.provider_tree [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 750.559759] env[62522]: DEBUG oslo_vmware.api [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415282, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.564515] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Waiting for the task: (returnval){ [ 750.564515] env[62522]: value = "task-2415283" [ 750.564515] env[62522]: _type = "Task" [ 750.564515] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.576125] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': task-2415283, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.681429] env[62522]: DEBUG nova.network.neutron [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Updating instance_info_cache with network_info: [{"id": "648bb769-184a-43cb-a66e-9b36814e4e4a", "address": "fa:16:3e:c0:ce:1b", "network": {"id": "6be1ee4a-ede6-491a-8cf1-6f237b02c850", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1134150376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce263d67988b4448b181b122b9270155", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f49a7d-c6e5-404f-b71a-91d8c070cd18", "external-id": "nsx-vlan-transportzone-120", "segmentation_id": 120, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap648bb769-18", "ovs_interfaceid": "648bb769-184a-43cb-a66e-9b36814e4e4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.695672] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Acquiring lock "5b69254a-b34b-48ff-a96c-d8573c9abf3b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.695672] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Lock "5b69254a-b34b-48ff-a96c-d8573c9abf3b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.695672] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Acquiring lock "5b69254a-b34b-48ff-a96c-d8573c9abf3b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.695672] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Lock "5b69254a-b34b-48ff-a96c-d8573c9abf3b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.695930] env[62522]: DEBUG oslo_concurrency.lockutils [None 
req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Lock "5b69254a-b34b-48ff-a96c-d8573c9abf3b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.702168] env[62522]: INFO nova.compute.manager [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Terminating instance [ 750.778422] env[62522]: DEBUG nova.compute.manager [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 750.778757] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 750.780110] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4c8cb2-aa02-4062-937b-a8cbdb982b7b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.790507] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 750.790507] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e98aab39-00d9-4d4c-8927-5898bdf6c6eb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.798679] env[62522]: DEBUG oslo_vmware.api [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Waiting for the task: (returnval){ [ 750.798679] env[62522]: value = "task-2415284" [ 750.798679] env[62522]: _type = "Task" [ 750.798679] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.807527] env[62522]: DEBUG oslo_vmware.api [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Task: {'id': task-2415284, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.042772] env[62522]: DEBUG oslo_vmware.api [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415282, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503428} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.042772] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] c1fd078c-61d4-4c0f-8c49-0f56a926a087/c1fd078c-61d4-4c0f-8c49-0f56a926a087.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 751.043027] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 751.043129] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-514d4527-4c85-449b-8311-d698d2958370 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.049605] env[62522]: DEBUG oslo_vmware.api [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Waiting for the task: (returnval){ [ 751.049605] env[62522]: value = "task-2415285" [ 751.049605] env[62522]: _type = "Task" [ 751.049605] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.059168] env[62522]: DEBUG nova.scheduler.client.report [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 751.078588] env[62522]: DEBUG oslo_vmware.api [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415285, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.084623] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': task-2415283, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.093281] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d70326b-49f3-41e1-b258-2acf8a87d4a4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.115273] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db306f99-2a67-4f90-9ff1-dfa35924d2fd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.123090] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Updating instance '879354d3-7423-41e2-93f6-0d8d3a120170' progress to 83 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 751.183974] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Releasing lock "refresh_cache-e813e7da-fd2c-4f10-b2f3-1e2b5c153a19" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.184379] env[62522]: DEBUG nova.compute.manager [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Instance network_info: |[{"id": "648bb769-184a-43cb-a66e-9b36814e4e4a", "address": "fa:16:3e:c0:ce:1b", "network": {"id": "6be1ee4a-ede6-491a-8cf1-6f237b02c850", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1134150376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce263d67988b4448b181b122b9270155", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f49a7d-c6e5-404f-b71a-91d8c070cd18", "external-id": "nsx-vlan-transportzone-120", "segmentation_id": 120, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap648bb769-18", "ovs_interfaceid": "648bb769-184a-43cb-a66e-9b36814e4e4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 751.185089] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:ce:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f5f49a7d-c6e5-404f-b71a-91d8c070cd18', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '648bb769-184a-43cb-a66e-9b36814e4e4a', 'vif_model': 
'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 751.194058] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Creating folder: Project (ce263d67988b4448b181b122b9270155). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 751.194563] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5298df3b-f033-4566-8a89-24ba8cca3a8c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.205581] env[62522]: DEBUG nova.compute.manager [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 751.205787] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 751.206721] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Created folder: Project (ce263d67988b4448b181b122b9270155) in parent group-v489562. [ 751.206948] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Creating folder: Instances. Parent ref: group-v489656. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 751.207744] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f0e44a0-a9f3-439c-8d83-8859e2afc53d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.210413] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e04f60e6-594d-405a-a49f-1e8eccaa6259 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.217439] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 751.217683] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b5b15eb7-3994-42fd-b8c6-31bfeff923bf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.221736] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Created folder: Instances in parent group-v489656. 
[ 751.221990] env[62522]: DEBUG oslo.service.loopingcall [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 751.222189] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 751.222380] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-abd542d3-573e-4a78-af50-0305691a33ef {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.238473] env[62522]: DEBUG oslo_vmware.api [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Waiting for the task: (returnval){ [ 751.238473] env[62522]: value = "task-2415288" [ 751.238473] env[62522]: _type = "Task" [ 751.238473] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.243821] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 751.243821] env[62522]: value = "task-2415289" [ 751.243821] env[62522]: _type = "Task" [ 751.243821] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.249896] env[62522]: DEBUG oslo_vmware.api [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Task: {'id': task-2415288, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.254512] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415289, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.309396] env[62522]: DEBUG oslo_vmware.api [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Task: {'id': task-2415284, 'name': PowerOffVM_Task, 'duration_secs': 0.319154} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.309396] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 751.309396] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 751.309660] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f6efc64d-b1ee-4712-a232-faa4094a6801 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.384561] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 751.384915] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 751.385222] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Deleting the datastore file [datastore1] 194c1dd8-3b0a-4c29-9779-65f1534121d1 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 751.385522] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c64eceed-078e-4450-b920-dd071521b54a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.395348] env[62522]: DEBUG oslo_vmware.api [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Waiting for the task: (returnval){ [ 751.395348] env[62522]: value = "task-2415291" [ 751.395348] env[62522]: _type = "Task" [ 751.395348] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.407567] env[62522]: DEBUG oslo_vmware.api [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Task: {'id': task-2415291, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.563535] env[62522]: DEBUG oslo_vmware.api [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415285, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068905} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.564017] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 751.564927] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73f86b93-a029-4668-8dd9-8aa1d6cb30d2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.568660] env[62522]: DEBUG oslo_concurrency.lockutils [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.678s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 751.569296] env[62522]: DEBUG nova.compute.manager [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 751.572350] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.585s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.572570] env[62522]: DEBUG nova.objects.instance [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Lazy-loading 'resources' on Instance uuid 17e1557d-e4cf-45b0-84da-4cbcffe31fb6 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 751.604754] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] c1fd078c-61d4-4c0f-8c49-0f56a926a087/c1fd078c-61d4-4c0f-8c49-0f56a926a087.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 751.607291] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f084464e-1340-47e8-8146-44f114cd2cce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.626042] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': task-2415283, 'name': PowerOnVM_Task} progress is 90%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.632032] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 751.632032] env[62522]: DEBUG oslo_vmware.api [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Waiting for the task: (returnval){ [ 751.632032] env[62522]: value = "task-2415292" [ 751.632032] env[62522]: _type = "Task" [ 751.632032] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.632032] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-02ae5329-cad6-40fb-8604-292d2bee5eca {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.643651] env[62522]: DEBUG oslo_vmware.api [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415292, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.644124] env[62522]: DEBUG oslo_vmware.api [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 751.644124] env[62522]: value = "task-2415293" [ 751.644124] env[62522]: _type = "Task" [ 751.644124] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.653031] env[62522]: DEBUG oslo_vmware.api [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415293, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.693801] env[62522]: DEBUG nova.compute.manager [req-55894548-0ede-46fd-abea-72068af0895c req-812b29ed-ce6a-4340-b790-67abce3f99f1 service nova] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Received event network-changed-648bb769-184a-43cb-a66e-9b36814e4e4a {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 751.693974] env[62522]: DEBUG nova.compute.manager [req-55894548-0ede-46fd-abea-72068af0895c req-812b29ed-ce6a-4340-b790-67abce3f99f1 service nova] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Refreshing instance network info cache due to event network-changed-648bb769-184a-43cb-a66e-9b36814e4e4a. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 751.694204] env[62522]: DEBUG oslo_concurrency.lockutils [req-55894548-0ede-46fd-abea-72068af0895c req-812b29ed-ce6a-4340-b790-67abce3f99f1 service nova] Acquiring lock "refresh_cache-e813e7da-fd2c-4f10-b2f3-1e2b5c153a19" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.694343] env[62522]: DEBUG oslo_concurrency.lockutils [req-55894548-0ede-46fd-abea-72068af0895c req-812b29ed-ce6a-4340-b790-67abce3f99f1 service nova] Acquired lock "refresh_cache-e813e7da-fd2c-4f10-b2f3-1e2b5c153a19" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.694726] env[62522]: DEBUG nova.network.neutron [req-55894548-0ede-46fd-abea-72068af0895c req-812b29ed-ce6a-4340-b790-67abce3f99f1 service nova] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Refreshing network info cache for port 648bb769-184a-43cb-a66e-9b36814e4e4a {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 751.749015] env[62522]: DEBUG oslo_vmware.api [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Task: {'id': task-2415288, 'name': PowerOffVM_Task, 'duration_secs': 0.215738} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.751751] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 751.751997] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 751.752301] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0fa86784-ff7d-410d-b0de-02feb492b658 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.758962] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415289, 'name': CreateVM_Task, 'duration_secs': 0.372699} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.759187] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 751.759889] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.760128] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.760487] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 751.760774] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b51c823-37b9-44d7-8efb-34ae71c339ce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.766118] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for the task: (returnval){ [ 751.766118] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5213b31c-8e1c-7bba-07a7-995878c20667" [ 751.766118] env[62522]: _type = "Task" [ 751.766118] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.775240] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5213b31c-8e1c-7bba-07a7-995878c20667, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.819276] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 751.819663] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 751.819990] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Deleting the datastore file [datastore1] 5b69254a-b34b-48ff-a96c-d8573c9abf3b {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 751.820484] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c5cb1851-c61f-42e0-a923-fa213f93500f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.829320] env[62522]: DEBUG oslo_vmware.api [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Waiting for the task: (returnval){ [ 751.829320] env[62522]: value = "task-2415295" [ 751.829320] env[62522]: _type = "Task" [ 751.829320] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.840987] env[62522]: DEBUG oslo_vmware.api [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Task: {'id': task-2415295, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.906784] env[62522]: DEBUG oslo_vmware.api [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Task: {'id': task-2415291, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138258} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.906784] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 751.906784] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 751.906784] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 751.906784] env[62522]: INFO nova.compute.manager [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Took 1.13 seconds to destroy the instance on the hypervisor. [ 751.906951] env[62522]: DEBUG oslo.service.loopingcall [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 751.907318] env[62522]: DEBUG nova.compute.manager [-] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 751.907318] env[62522]: DEBUG nova.network.neutron [-] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 752.079922] env[62522]: DEBUG nova.compute.utils [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 752.084146] env[62522]: DEBUG nova.compute.manager [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 752.084146] env[62522]: DEBUG nova.network.neutron [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 752.095832] env[62522]: DEBUG oslo_vmware.api [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': task-2415283, 'name': PowerOnVM_Task, 'duration_secs': 1.107866} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.099733] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 752.099733] env[62522]: INFO nova.compute.manager [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Took 8.58 seconds to spawn the instance on the hypervisor. [ 752.099733] env[62522]: DEBUG nova.compute.manager [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 752.099733] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-404943fb-0995-4cbd-aca4-fac57792034a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.153387] env[62522]: DEBUG oslo_vmware.api [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415292, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.165161] env[62522]: DEBUG oslo_vmware.api [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415293, 'name': PowerOnVM_Task, 'duration_secs': 0.430507} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.166391] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 752.166587] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c2cba6-7bce-4c4f-8165-39e12552ca2d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Updating instance '879354d3-7423-41e2-93f6-0d8d3a120170' progress to 100 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 752.189892] env[62522]: DEBUG nova.policy [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9694ee575d094ccf845eb57acf3e70c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '00b27498c07344d1bf9cecefa0fca033', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 752.280943] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5213b31c-8e1c-7bba-07a7-995878c20667, 'name': SearchDatastore_Task, 'duration_secs': 0.012716} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.281280] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.281632] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 752.281741] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 752.281885] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.282082] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 752.288962] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a0002e64-c3ab-4791-8fce-126aefd6b95b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.300295] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 752.300780] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 752.301436] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e8979c2-54fb-4a2b-8bfe-4ce186ce29c1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.310212] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for the task: (returnval){ [ 752.310212] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526aed54-ad12-7f7d-cb34-195dcedb6b8a" [ 752.310212] env[62522]: _type = "Task" [ 752.310212] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.323823] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526aed54-ad12-7f7d-cb34-195dcedb6b8a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.341233] env[62522]: DEBUG oslo_vmware.api [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Task: {'id': task-2415295, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.295555} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.341796] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 752.342035] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 752.342271] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 752.342483] env[62522]: INFO nova.compute.manager [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Took 1.14 seconds to destroy the instance on the hypervisor. [ 752.342804] env[62522]: DEBUG oslo.service.loopingcall [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 752.346261] env[62522]: DEBUG nova.compute.manager [-] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 752.346409] env[62522]: DEBUG nova.network.neutron [-] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 752.586988] env[62522]: DEBUG nova.compute.manager [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 752.623141] env[62522]: INFO nova.compute.manager [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Took 47.42 seconds to build instance. [ 752.651837] env[62522]: DEBUG oslo_vmware.api [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415292, 'name': ReconfigVM_Task, 'duration_secs': 0.697348} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.652143] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Reconfigured VM instance instance-00000021 to attach disk [datastore2] c1fd078c-61d4-4c0f-8c49-0f56a926a087/c1fd078c-61d4-4c0f-8c49-0f56a926a087.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 752.654042] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d84c571-2cdd-42f8-af2f-17bdd0bbd527 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.661042] env[62522]: DEBUG oslo_vmware.api [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Waiting for the task: (returnval){ [ 752.661042] env[62522]: value = "task-2415296" [ 752.661042] env[62522]: _type = "Task" [ 752.661042] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.677518] env[62522]: DEBUG oslo_vmware.api [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415296, 'name': Rename_Task} progress is 6%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.684908] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3ed7927-4994-44a7-a115-3c42663c7ba0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.696217] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac8b308-849a-43fc-87f1-d1f013e64d77 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.729449] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87727395-4192-403e-9d50-c4512877ea04 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.737024] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8633799b-b615-4b8e-ac3b-8653fee4df14 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.752826] env[62522]: DEBUG nova.compute.provider_tree [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 752.779444] env[62522]: DEBUG nova.compute.manager [req-8a732aad-17d4-4c5d-a1d3-7c521618214e req-76cba639-baeb-4310-ab0f-f804bb08256f service nova] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Received event network-vif-deleted-1a476d07-4a16-4431-ba2e-bb302475c1f8 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 752.779650] env[62522]: INFO nova.compute.manager [req-8a732aad-17d4-4c5d-a1d3-7c521618214e req-76cba639-baeb-4310-ab0f-f804bb08256f service nova] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Neutron deleted interface 1a476d07-4a16-4431-ba2e-bb302475c1f8; detaching it from the instance and deleting it from the info cache [ 752.779856] env[62522]: DEBUG nova.network.neutron [req-8a732aad-17d4-4c5d-a1d3-7c521618214e req-76cba639-baeb-4310-ab0f-f804bb08256f service nova] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.822345] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526aed54-ad12-7f7d-cb34-195dcedb6b8a, 'name': SearchDatastore_Task, 'duration_secs': 0.012445} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.823187] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca34f1ca-80ae-4117-a3e5-c1d49a030ae8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.829598] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for the task: (returnval){ [ 752.829598] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f605c7-2ced-100a-d155-dd3de6fc93f6" [ 752.829598] env[62522]: _type = "Task" [ 752.829598] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.839115] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f605c7-2ced-100a-d155-dd3de6fc93f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.900565] env[62522]: DEBUG nova.network.neutron [-] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.088722] env[62522]: DEBUG nova.network.neutron [req-55894548-0ede-46fd-abea-72068af0895c req-812b29ed-ce6a-4340-b790-67abce3f99f1 service nova] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Updated VIF entry in instance network info cache for port 648bb769-184a-43cb-a66e-9b36814e4e4a. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 753.089168] env[62522]: DEBUG nova.network.neutron [req-55894548-0ede-46fd-abea-72068af0895c req-812b29ed-ce6a-4340-b790-67abce3f99f1 service nova] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Updating instance_info_cache with network_info: [{"id": "648bb769-184a-43cb-a66e-9b36814e4e4a", "address": "fa:16:3e:c0:ce:1b", "network": {"id": "6be1ee4a-ede6-491a-8cf1-6f237b02c850", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1134150376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce263d67988b4448b181b122b9270155", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f49a7d-c6e5-404f-b71a-91d8c070cd18", "external-id": "nsx-vlan-transportzone-120", "segmentation_id": 120, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap648bb769-18", "ovs_interfaceid": "648bb769-184a-43cb-a66e-9b36814e4e4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.126705] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2609b8c8-13ac-491e-a2cb-29e6ffada3eb tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Lock "d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.309s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.172011] env[62522]: DEBUG oslo_vmware.api [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415296, 'name': Rename_Task, 'duration_secs': 0.374636} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.172011] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 753.172011] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-63ac55a7-8c7a-4a24-8a96-9d849836221a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.178250] env[62522]: DEBUG oslo_vmware.api [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Waiting for the task: (returnval){ [ 753.178250] env[62522]: value = "task-2415297" [ 753.178250] env[62522]: _type = "Task" [ 753.178250] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.197926] env[62522]: DEBUG oslo_vmware.api [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415297, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.257338] env[62522]: DEBUG nova.scheduler.client.report [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 753.283271] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5ab68a98-6ef8-4e2a-a3c3-5d92fa1eb12c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.296268] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7734b490-c362-4e45-ae47-734c5cc6cf5e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.322141] env[62522]: DEBUG nova.network.neutron [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Successfully created port: 9f1e209f-6bc2-4b96-9c5d-830ee01139b8 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 753.339130] env[62522]: DEBUG nova.compute.manager [req-8a732aad-17d4-4c5d-a1d3-7c521618214e req-76cba639-baeb-4310-ab0f-f804bb08256f service nova] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Detach interface failed, port_id=1a476d07-4a16-4431-ba2e-bb302475c1f8, reason: Instance 194c1dd8-3b0a-4c29-9779-65f1534121d1 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 753.348854] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f605c7-2ced-100a-d155-dd3de6fc93f6, 'name': SearchDatastore_Task, 'duration_secs': 0.02361} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.349177] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.349520] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] e813e7da-fd2c-4f10-b2f3-1e2b5c153a19/e813e7da-fd2c-4f10-b2f3-1e2b5c153a19.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 753.349801] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eb49a476-6b69-4ba8-9b9c-7d7aef747731 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.357207] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for the task: (returnval){ [ 753.357207] env[62522]: value = "task-2415298" [ 753.357207] env[62522]: _type = "Task" [ 753.357207] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.365371] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415298, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.402924] env[62522]: INFO nova.compute.manager [-] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Took 1.50 seconds to deallocate network for instance. [ 753.594736] env[62522]: DEBUG oslo_concurrency.lockutils [req-55894548-0ede-46fd-abea-72068af0895c req-812b29ed-ce6a-4340-b790-67abce3f99f1 service nova] Releasing lock "refresh_cache-e813e7da-fd2c-4f10-b2f3-1e2b5c153a19" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.595831] env[62522]: DEBUG nova.compute.manager [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 753.622160] env[62522]: DEBUG nova.virt.hardware [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 753.622422] env[62522]: DEBUG nova.virt.hardware [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 753.622579] env[62522]: DEBUG nova.virt.hardware [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 753.622764] env[62522]: DEBUG nova.virt.hardware [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 753.622913] env[62522]: DEBUG nova.virt.hardware [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 753.623173] env[62522]: DEBUG nova.virt.hardware [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 753.623476] env[62522]: DEBUG nova.virt.hardware [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 753.623773] env[62522]: DEBUG nova.virt.hardware [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 753.623909] env[62522]: DEBUG nova.virt.hardware [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] 
Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 753.624129] env[62522]: DEBUG nova.virt.hardware [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 753.624368] env[62522]: DEBUG nova.virt.hardware [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 753.625328] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edb52474-e8b5-472a-b294-850c16468526 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.634098] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c60db023-6322-470d-b4e1-0df04cf0ff01 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.639282] env[62522]: DEBUG nova.compute.manager [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 753.704260] env[62522]: DEBUG oslo_vmware.api [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415297, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.767282] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.195s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.770194] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.645s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.774023] env[62522]: INFO nova.compute.claims [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 753.793872] env[62522]: DEBUG nova.network.neutron [-] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.796399] env[62522]: INFO nova.scheduler.client.report [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Deleted allocations for instance 17e1557d-e4cf-45b0-84da-4cbcffe31fb6 [ 753.870903] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415298, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.910713] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.166477] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.178705] env[62522]: DEBUG nova.compute.manager [req-587d98af-eec2-4c08-a57e-b8c73fa937a2 req-9ac1a956-d092-49f4-9985-90443983932c service nova] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Received event network-vif-deleted-00fd23e6-10da-4963-a366-1bec61020dd9 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 754.190538] env[62522]: DEBUG oslo_vmware.api [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415297, 'name': PowerOnVM_Task, 'duration_secs': 0.522759} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.192598] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 754.192598] env[62522]: INFO nova.compute.manager [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Took 8.09 seconds to spawn the instance on the hypervisor. [ 754.192598] env[62522]: DEBUG nova.compute.manager [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 754.192598] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac4ae13-c65b-481d-be28-8709ae5df53f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.301914] env[62522]: INFO nova.compute.manager [-] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Took 1.96 seconds to deallocate network for instance. 
[ 754.312117] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d2fde235-ff93-4146-bd0e-9f71f24424f9 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Lock "17e1557d-e4cf-45b0-84da-4cbcffe31fb6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.715s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.372590] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415298, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.664807} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.372994] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] e813e7da-fd2c-4f10-b2f3-1e2b5c153a19/e813e7da-fd2c-4f10-b2f3-1e2b5c153a19.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 754.373251] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 754.373780] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8063754c-7b6b-4912-ac1b-7cf3f0113b45 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.382040] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for the task: (returnval){ [ 754.382040] env[62522]: value = "task-2415299" [ 754.382040] env[62522]: _type = "Task" [ 754.382040] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.391650] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415299, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.662197] env[62522]: DEBUG oslo_concurrency.lockutils [None req-10df2d46-cc71-48c1-ab3d-d58d54b75b0d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "879354d3-7423-41e2-93f6-0d8d3a120170" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.662197] env[62522]: DEBUG oslo_concurrency.lockutils [None req-10df2d46-cc71-48c1-ab3d-d58d54b75b0d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "879354d3-7423-41e2-93f6-0d8d3a120170" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.662197] env[62522]: DEBUG nova.compute.manager [None req-10df2d46-cc71-48c1-ab3d-d58d54b75b0d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Going to confirm migration 1 {{(pid=62522) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 754.713436] env[62522]: INFO nova.compute.manager [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Took 46.85 seconds to build instance. [ 754.814799] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.867702] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Acquiring lock "3824a70e-8498-410a-904d-c7cd0de0c358" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.867702] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Lock "3824a70e-8498-410a-904d-c7cd0de0c358" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.867777] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Acquiring lock "3824a70e-8498-410a-904d-c7cd0de0c358-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.868037] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 
tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Lock "3824a70e-8498-410a-904d-c7cd0de0c358-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.868143] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Lock "3824a70e-8498-410a-904d-c7cd0de0c358-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.870307] env[62522]: INFO nova.compute.manager [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Terminating instance [ 754.896098] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415299, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085696} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.896098] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 754.896098] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a7d7b4-c27c-40b4-b190-da133903ec4f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.920407] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Reconfiguring VM instance instance-00000022 to attach disk [datastore2] e813e7da-fd2c-4f10-b2f3-1e2b5c153a19/e813e7da-fd2c-4f10-b2f3-1e2b5c153a19.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 754.924464] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46eb69f9-73a2-4028-a4f7-6319e7c43ca9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.947316] env[62522]: DEBUG oslo_concurrency.lockutils [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "bf44e269-0297-473e-b6ce-04a40d0ec1b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.947591] env[62522]: DEBUG oslo_concurrency.lockutils [None 
req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "bf44e269-0297-473e-b6ce-04a40d0ec1b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.953297] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for the task: (returnval){ [ 754.953297] env[62522]: value = "task-2415300" [ 754.953297] env[62522]: _type = "Task" [ 754.953297] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.962665] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415300, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.202553] env[62522]: DEBUG nova.compute.manager [req-d59f7fa6-0650-4a25-a6d0-601447f0e793 req-8a41bd34-7bda-4e7b-a4cf-6993ba2b8828 service nova] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Received event network-changed-e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 755.202820] env[62522]: DEBUG nova.compute.manager [req-d59f7fa6-0650-4a25-a6d0-601447f0e793 req-8a41bd34-7bda-4e7b-a4cf-6993ba2b8828 service nova] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Refreshing instance network info cache due to event network-changed-e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 755.203038] env[62522]: DEBUG oslo_concurrency.lockutils [req-d59f7fa6-0650-4a25-a6d0-601447f0e793 req-8a41bd34-7bda-4e7b-a4cf-6993ba2b8828 service nova] Acquiring lock "refresh_cache-d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 755.203183] env[62522]: DEBUG oslo_concurrency.lockutils [req-d59f7fa6-0650-4a25-a6d0-601447f0e793 req-8a41bd34-7bda-4e7b-a4cf-6993ba2b8828 service nova] Acquired lock "refresh_cache-d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.203343] env[62522]: DEBUG nova.network.neutron [req-d59f7fa6-0650-4a25-a6d0-601447f0e793 req-8a41bd34-7bda-4e7b-a4cf-6993ba2b8828 service nova] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Refreshing network info cache for port e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 755.216321] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae483440-e7d9-483e-bfee-475462244d18 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Lock "c1fd078c-61d4-4c0f-8c49-0f56a926a087" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.821s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.293198] env[62522]: DEBUG oslo_concurrency.lockutils [None req-10df2d46-cc71-48c1-ab3d-d58d54b75b0d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "refresh_cache-879354d3-7423-41e2-93f6-0d8d3a120170" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 755.293198] env[62522]: DEBUG oslo_concurrency.lockutils [None req-10df2d46-cc71-48c1-ab3d-d58d54b75b0d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquired lock "refresh_cache-879354d3-7423-41e2-93f6-0d8d3a120170" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.294376] env[62522]: DEBUG nova.network.neutron [None req-10df2d46-cc71-48c1-ab3d-d58d54b75b0d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 755.294376] env[62522]: DEBUG nova.objects.instance [None req-10df2d46-cc71-48c1-ab3d-d58d54b75b0d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lazy-loading 'info_cache' on Instance uuid 879354d3-7423-41e2-93f6-0d8d3a120170 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 755.378590] env[62522]: DEBUG nova.compute.manager [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 755.378876] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 755.379720] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2154a297-f62c-4a1e-afbd-502dbd1fcd6e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.388716] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 755.389037] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-851545d3-c348-4ee4-b5ad-ac9dbd99223e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.395868] env[62522]: DEBUG oslo_vmware.api [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Waiting for the task: (returnval){ [ 755.395868] env[62522]: value = "task-2415301" [ 755.395868] env[62522]: _type = "Task" [ 755.395868] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.407441] env[62522]: DEBUG oslo_vmware.api [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2415301, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.414916] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29499320-48bb-47ec-a369-20b02eef64f0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.422739] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88885ab4-2ac7-49e5-bcaf-f858bc68a542 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.459404] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe0d8fbe-3fd9-43c4-bcaf-6d741af0661e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.468258] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415300, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.472046] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae4b148e-6d50-4c59-a799-e1d6de808849 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.492270] env[62522]: DEBUG nova.compute.provider_tree [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 755.580280] env[62522]: DEBUG nova.network.neutron [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Successfully updated port: 9f1e209f-6bc2-4b96-9c5d-830ee01139b8 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 755.720711] env[62522]: DEBUG nova.compute.manager [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 755.905750] env[62522]: DEBUG oslo_vmware.api [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2415301, 'name': PowerOffVM_Task, 'duration_secs': 0.339504} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.905927] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 755.906714] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 755.908224] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8cbcdc58-2181-4d3b-a9b4-e062542f4fd9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.967764] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415300, 'name': ReconfigVM_Task, 'duration_secs': 0.751296} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.968301] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Reconfigured VM instance instance-00000022 to attach disk [datastore2] e813e7da-fd2c-4f10-b2f3-1e2b5c153a19/e813e7da-fd2c-4f10-b2f3-1e2b5c153a19.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 755.968766] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b164998f-2f36-4731-9602-ee90ce0f2632 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.977353] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for the task: (returnval){ [ 755.977353] env[62522]: value = "task-2415303" [ 755.977353] env[62522]: _type = "Task" [ 755.977353] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.983901] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 755.984186] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 755.984384] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Deleting the datastore file [datastore1] 3824a70e-8498-410a-904d-c7cd0de0c358 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 755.985009] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d1b22c63-e888-4868-a67d-9051901601ef {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.990193] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415303, 'name': Rename_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.994896] env[62522]: DEBUG nova.scheduler.client.report [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 755.998330] env[62522]: DEBUG oslo_vmware.api [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Waiting for the task: (returnval){ [ 755.998330] env[62522]: value = "task-2415304" [ 755.998330] env[62522]: _type = "Task" [ 755.998330] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.006958] env[62522]: DEBUG oslo_vmware.api [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2415304, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.085212] env[62522]: DEBUG oslo_concurrency.lockutils [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "refresh_cache-ebca687d-4de7-4fd6-99fb-b4f0154abe9c" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.085292] env[62522]: DEBUG oslo_concurrency.lockutils [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired lock "refresh_cache-ebca687d-4de7-4fd6-99fb-b4f0154abe9c" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.085500] env[62522]: DEBUG nova.network.neutron [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 756.178871] env[62522]: DEBUG nova.network.neutron [req-d59f7fa6-0650-4a25-a6d0-601447f0e793 req-8a41bd34-7bda-4e7b-a4cf-6993ba2b8828 service nova] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Updated VIF entry in instance network info cache for port e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 756.179371] env[62522]: DEBUG nova.network.neutron [req-d59f7fa6-0650-4a25-a6d0-601447f0e793 req-8a41bd34-7bda-4e7b-a4cf-6993ba2b8828 service nova] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Updating instance_info_cache with network_info: [{"id": "e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55", "address": "fa:16:3e:94:f4:ed", "network": {"id": "eb0d2ded-859d-46b3-843d-bb580d0bfb6b", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-831675108-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dba307f1fbf48bfac98d9836a72254e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0fac98f-bf", "ovs_interfaceid": "e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.251413] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 756.487259] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415303, 'name': Rename_Task, 'duration_secs': 0.179943} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.488098] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 756.488496] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-854b8b41-eb6f-4047-86d8-0c128d51256e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.501707] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.732s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.502298] env[62522]: DEBUG nova.compute.manager [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 756.505485] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for the task: (returnval){ [ 756.505485] env[62522]: value = "task-2415305" [ 756.505485] env[62522]: _type = "Task" [ 756.505485] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.509266] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.685s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 756.510660] env[62522]: INFO nova.compute.claims [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 756.525935] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415305, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.530585] env[62522]: DEBUG oslo_vmware.api [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Task: {'id': task-2415304, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.288496} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.530876] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 756.531105] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 756.531301] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 756.531484] env[62522]: INFO nova.compute.manager [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Took 1.15 seconds to destroy the instance on the hypervisor. [ 756.531745] env[62522]: DEBUG oslo.service.loopingcall [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 756.531952] env[62522]: DEBUG nova.compute.manager [-] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 756.532065] env[62522]: DEBUG nova.network.neutron [-] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 756.682261] env[62522]: DEBUG oslo_concurrency.lockutils [req-d59f7fa6-0650-4a25-a6d0-601447f0e793 req-8a41bd34-7bda-4e7b-a4cf-6993ba2b8828 service nova] Releasing lock "refresh_cache-d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.693430] env[62522]: DEBUG nova.compute.manager [req-cad3030d-373a-4e66-8365-4046f547ae9a req-b7186787-d90b-4981-9ac4-a98f861840c7 service nova] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Received event network-vif-plugged-9f1e209f-6bc2-4b96-9c5d-830ee01139b8 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 756.693784] env[62522]: DEBUG oslo_concurrency.lockutils [req-cad3030d-373a-4e66-8365-4046f547ae9a req-b7186787-d90b-4981-9ac4-a98f861840c7 service nova] Acquiring lock "ebca687d-4de7-4fd6-99fb-b4f0154abe9c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 756.694075] env[62522]: DEBUG oslo_concurrency.lockutils [req-cad3030d-373a-4e66-8365-4046f547ae9a req-b7186787-d90b-4981-9ac4-a98f861840c7 service nova] Lock "ebca687d-4de7-4fd6-99fb-b4f0154abe9c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 756.694296] env[62522]: DEBUG oslo_concurrency.lockutils [req-cad3030d-373a-4e66-8365-4046f547ae9a req-b7186787-d90b-4981-9ac4-a98f861840c7 service nova] Lock "ebca687d-4de7-4fd6-99fb-b4f0154abe9c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.694521] env[62522]: DEBUG nova.compute.manager [req-cad3030d-373a-4e66-8365-4046f547ae9a req-b7186787-d90b-4981-9ac4-a98f861840c7 service nova] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] No waiting events found dispatching network-vif-plugged-9f1e209f-6bc2-4b96-9c5d-830ee01139b8 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 756.694693] env[62522]: WARNING nova.compute.manager [req-cad3030d-373a-4e66-8365-4046f547ae9a req-b7186787-d90b-4981-9ac4-a98f861840c7 service nova] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Received unexpected event network-vif-plugged-9f1e209f-6bc2-4b96-9c5d-830ee01139b8 for instance with vm_state building and task_state spawning. 
[ 756.694882] env[62522]: DEBUG nova.compute.manager [req-cad3030d-373a-4e66-8365-4046f547ae9a req-b7186787-d90b-4981-9ac4-a98f861840c7 service nova] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Received event network-changed-9f1e209f-6bc2-4b96-9c5d-830ee01139b8 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 756.695050] env[62522]: DEBUG nova.compute.manager [req-cad3030d-373a-4e66-8365-4046f547ae9a req-b7186787-d90b-4981-9ac4-a98f861840c7 service nova] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Refreshing instance network info cache due to event network-changed-9f1e209f-6bc2-4b96-9c5d-830ee01139b8. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 756.695221] env[62522]: DEBUG oslo_concurrency.lockutils [req-cad3030d-373a-4e66-8365-4046f547ae9a req-b7186787-d90b-4981-9ac4-a98f861840c7 service nova] Acquiring lock "refresh_cache-ebca687d-4de7-4fd6-99fb-b4f0154abe9c" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.707187] env[62522]: DEBUG nova.network.neutron [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 756.925305] env[62522]: DEBUG nova.network.neutron [None req-10df2d46-cc71-48c1-ab3d-d58d54b75b0d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Updating instance_info_cache with network_info: [{"id": "cd619060-5655-434c-967f-7552adca021b", "address": "fa:16:3e:7d:62:dc", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd619060-56", "ovs_interfaceid": "cd619060-5655-434c-967f-7552adca021b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.008105] env[62522]: DEBUG nova.compute.utils [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 757.009607] env[62522]: DEBUG nova.compute.manager [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Not allocating networking since 'none' was specified. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 757.028365] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415305, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.352369] env[62522]: DEBUG nova.network.neutron [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Updating instance_info_cache with network_info: [{"id": "9f1e209f-6bc2-4b96-9c5d-830ee01139b8", "address": "fa:16:3e:f8:24:fc", "network": {"id": "2037da36-cd13-4cb1-95f4-08d1e174336c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1895994075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00b27498c07344d1bf9cecefa0fca033", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f1e209f-6b", "ovs_interfaceid": "9f1e209f-6bc2-4b96-9c5d-830ee01139b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.428530] env[62522]: DEBUG oslo_concurrency.lockutils [None req-10df2d46-cc71-48c1-ab3d-d58d54b75b0d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Releasing lock "refresh_cache-879354d3-7423-41e2-93f6-0d8d3a120170" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.428792] env[62522]: DEBUG nova.objects.instance [None req-10df2d46-cc71-48c1-ab3d-d58d54b75b0d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lazy-loading 'migration_context' on Instance uuid 879354d3-7423-41e2-93f6-0d8d3a120170 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 757.512553] env[62522]: DEBUG nova.compute.manager [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 757.532530] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415305, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.854950] env[62522]: DEBUG oslo_concurrency.lockutils [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Releasing lock "refresh_cache-ebca687d-4de7-4fd6-99fb-b4f0154abe9c" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.855275] env[62522]: DEBUG nova.compute.manager [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Instance network_info: |[{"id": "9f1e209f-6bc2-4b96-9c5d-830ee01139b8", "address": "fa:16:3e:f8:24:fc", "network": {"id": "2037da36-cd13-4cb1-95f4-08d1e174336c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1895994075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00b27498c07344d1bf9cecefa0fca033", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f1e209f-6b", "ovs_interfaceid": "9f1e209f-6bc2-4b96-9c5d-830ee01139b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 757.857803] env[62522]: DEBUG oslo_concurrency.lockutils [req-cad3030d-373a-4e66-8365-4046f547ae9a req-b7186787-d90b-4981-9ac4-a98f861840c7 service nova] Acquired lock "refresh_cache-ebca687d-4de7-4fd6-99fb-b4f0154abe9c" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.857993] env[62522]: DEBUG nova.network.neutron [req-cad3030d-373a-4e66-8365-4046f547ae9a req-b7186787-d90b-4981-9ac4-a98f861840c7 service nova] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Refreshing network info cache for port 9f1e209f-6bc2-4b96-9c5d-830ee01139b8 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 757.860083] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:24:fc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f65996a3-f865-4492-9377-cd14ec8b3aae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9f1e209f-6bc2-4b96-9c5d-830ee01139b8', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 757.868162] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Creating folder: Project 
(00b27498c07344d1bf9cecefa0fca033). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 757.869538] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a80217a8-6e71-40e6-b21f-8d06bd49887e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.885599] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Created folder: Project (00b27498c07344d1bf9cecefa0fca033) in parent group-v489562. [ 757.885801] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Creating folder: Instances. Parent ref: group-v489659. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 757.886047] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a96a5b1c-57fe-430b-92a0-5eb2ebf8199b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.897765] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Created folder: Instances in parent group-v489659. [ 757.897765] env[62522]: DEBUG oslo.service.loopingcall [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 757.897765] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 757.899157] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b05aba12-160e-4ad0-b832-99900292b866 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.916924] env[62522]: DEBUG nova.network.neutron [-] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.924914] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 757.924914] env[62522]: value = "task-2415308" [ 757.924914] env[62522]: _type = "Task" [ 757.924914] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.933870] env[62522]: DEBUG nova.objects.base [None req-10df2d46-cc71-48c1-ab3d-d58d54b75b0d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Object Instance<879354d3-7423-41e2-93f6-0d8d3a120170> lazy-loaded attributes: info_cache,migration_context {{(pid=62522) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 757.934144] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415308, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.936980] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb4585f-97ce-4fd4-ad65-46d0fc2085b4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.960378] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e79f0d3-2faa-4f02-8daf-8b67076ba283 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.967993] env[62522]: DEBUG oslo_vmware.api [None req-10df2d46-cc71-48c1-ab3d-d58d54b75b0d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 757.967993] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a420cf-3eed-6a0c-df49-64651b0692c8" [ 757.967993] env[62522]: _type = "Task" [ 757.967993] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.981600] env[62522]: DEBUG oslo_vmware.api [None req-10df2d46-cc71-48c1-ab3d-d58d54b75b0d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a420cf-3eed-6a0c-df49-64651b0692c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.039019] env[62522]: DEBUG oslo_vmware.api [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415305, 'name': PowerOnVM_Task, 'duration_secs': 1.111636} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.039019] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 758.039019] env[62522]: INFO nova.compute.manager [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Took 9.36 seconds to spawn the instance on the hypervisor. 
[ 758.039019] env[62522]: DEBUG nova.compute.manager [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 758.039019] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca691dad-cbfd-4937-9f0c-8537c7b670b2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.057130] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0acf1ca8-83d7-480b-8604-faa5603d2299 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.063035] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a34ca0d6-bad6-403b-ad3e-515b414fe32d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.098861] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f59b8252-43ed-4b85-9c93-69a6a3b90a1d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.107289] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52264842-3bcf-48c6-b942-47975db9716e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.122919] env[62522]: DEBUG nova.compute.provider_tree [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 758.141283] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquiring lock "41a980df-88a9-4f9b-b34b-905b226c0675" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.141283] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Lock "41a980df-88a9-4f9b-b34b-905b226c0675" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.420345] env[62522]: INFO nova.compute.manager [-] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Took 1.89 seconds to deallocate network for instance. [ 758.435405] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415308, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.480210] env[62522]: DEBUG oslo_vmware.api [None req-10df2d46-cc71-48c1-ab3d-d58d54b75b0d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a420cf-3eed-6a0c-df49-64651b0692c8, 'name': SearchDatastore_Task, 'duration_secs': 0.017534} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.480210] env[62522]: DEBUG oslo_concurrency.lockutils [None req-10df2d46-cc71-48c1-ab3d-d58d54b75b0d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.532345] env[62522]: DEBUG nova.compute.manager [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 758.564480] env[62522]: DEBUG nova.virt.hardware [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 758.564733] env[62522]: DEBUG nova.virt.hardware [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 758.564954] env[62522]: DEBUG nova.virt.hardware [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 758.565093] env[62522]: DEBUG nova.virt.hardware [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 758.565226] env[62522]: DEBUG nova.virt.hardware [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Image pref 0:0:0 {{(pid=62522) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 758.565375] env[62522]: DEBUG nova.virt.hardware [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 758.565609] env[62522]: DEBUG nova.virt.hardware [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 758.565854] env[62522]: DEBUG nova.virt.hardware [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 758.566063] env[62522]: DEBUG nova.virt.hardware [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 758.566235] env[62522]: DEBUG nova.virt.hardware [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 758.566413] env[62522]: DEBUG nova.virt.hardware [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 758.569163] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2a596c5-4609-4145-8dc7-880f9e39031b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.575888] env[62522]: INFO nova.compute.manager [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Took 47.27 seconds to build instance. 
[ 758.583058] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a188505-9464-4160-b00d-efe0da67bd62 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.599448] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Instance VIF info [] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 758.605367] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Creating folder: Project (341ebc9d97c2461c925dd5f7df3fcc61). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 758.605938] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b25e3fe9-9cbe-41b4-ad30-ec0b175979c2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.616256] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Created folder: Project (341ebc9d97c2461c925dd5f7df3fcc61) in parent group-v489562. [ 758.616450] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Creating folder: Instances. Parent ref: group-v489662. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 758.616684] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4ce1ddc5-9217-41f0-b171-37fad666edd9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.629737] env[62522]: DEBUG nova.scheduler.client.report [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 758.632969] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Created folder: Instances in parent group-v489662. [ 758.633222] env[62522]: DEBUG oslo.service.loopingcall [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 758.633592] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 758.633800] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c53b15c-d743-41a8-8a6b-638da634e753 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.653048] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 758.653048] env[62522]: value = "task-2415311" [ 758.653048] env[62522]: _type = "Task" [ 758.653048] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.661029] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415311, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.750781] env[62522]: DEBUG nova.network.neutron [req-cad3030d-373a-4e66-8365-4046f547ae9a req-b7186787-d90b-4981-9ac4-a98f861840c7 service nova] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Updated VIF entry in instance network info cache for port 9f1e209f-6bc2-4b96-9c5d-830ee01139b8. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 758.751316] env[62522]: DEBUG nova.network.neutron [req-cad3030d-373a-4e66-8365-4046f547ae9a req-b7186787-d90b-4981-9ac4-a98f861840c7 service nova] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Updating instance_info_cache with network_info: [{"id": "9f1e209f-6bc2-4b96-9c5d-830ee01139b8", "address": "fa:16:3e:f8:24:fc", "network": {"id": "2037da36-cd13-4cb1-95f4-08d1e174336c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1895994075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00b27498c07344d1bf9cecefa0fca033", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f1e209f-6b", "ovs_interfaceid": "9f1e209f-6bc2-4b96-9c5d-830ee01139b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.873833] env[62522]: DEBUG nova.compute.manager [req-607afbfd-2667-4faa-9a83-aca20f5efb88 req-85fdac63-e1df-4f9f-a12f-b4f917a8502c service nova] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Received event network-vif-deleted-a0e9b152-7b65-405a-8302-dc8561d06224 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 758.928880] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 
tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.935869] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415308, 'name': CreateVM_Task, 'duration_secs': 0.511614} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.939896] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 758.939896] env[62522]: DEBUG oslo_concurrency.lockutils [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.939896] env[62522]: DEBUG oslo_concurrency.lockutils [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.939896] env[62522]: DEBUG oslo_concurrency.lockutils [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 758.939896] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92c2faab-7fe0-4a53-96c7-b3f994cb472e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.951220] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 758.951220] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fc7080-4541-92de-df05-4057c6a52022" [ 758.951220] env[62522]: _type = "Task" [ 758.951220] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.959870] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fc7080-4541-92de-df05-4057c6a52022, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.077851] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c45f0f45-5d54-432a-9a05-6ebbe3199758 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock "e813e7da-fd2c-4f10-b2f3-1e2b5c153a19" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.773s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.134799] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.626s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.135362] env[62522]: DEBUG nova.compute.manager [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 759.138261] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 27.344s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.138694] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.139064] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62522) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 759.139466] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.842s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.139742] env[62522]: DEBUG nova.objects.instance [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Lazy-loading 'resources' on Instance uuid 8461f823-e48a-42f0-8863-44177565b82d {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 759.144835] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a609df-7de2-491e-8a03-6805968eb265 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.151736] env[62522]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c737a3d-4a26-45ab-b272-81894f04ccdf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.176273] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415311, 'name': CreateVM_Task} progress is 25%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.179128] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c1ba29a-727a-40a3-b75a-525fb25000e6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.185402] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a956a1-8ab6-44a4-9a77-b02883d31829 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.219288] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180335MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=62522) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 759.219541] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 759.254101] env[62522]: DEBUG oslo_concurrency.lockutils [req-cad3030d-373a-4e66-8365-4046f547ae9a req-b7186787-d90b-4981-9ac4-a98f861840c7 service nova] Releasing lock "refresh_cache-ebca687d-4de7-4fd6-99fb-b4f0154abe9c" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.465417] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fc7080-4541-92de-df05-4057c6a52022, 'name': SearchDatastore_Task, 'duration_secs': 0.011073} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.465417] env[62522]: DEBUG oslo_concurrency.lockutils [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.465417] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 759.465417] env[62522]: DEBUG oslo_concurrency.lockutils [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 759.465564] env[62522]: DEBUG oslo_concurrency.lockutils [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.465564] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 759.465564] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc061972-40e9-4c79-aa89-e7e2620c8219 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.475286] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 759.475614] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 759.476541] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de146ca3-c093-422b-841b-2c1fca3e595c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.484655] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 759.484655] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520c2702-587e-9600-306d-61b0e5cfd1ae" [ 759.484655] env[62522]: _type = "Task" [ 759.484655] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.495107] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520c2702-587e-9600-306d-61b0e5cfd1ae, 'name': SearchDatastore_Task, 'duration_secs': 0.008887} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.496177] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91b3a957-38c3-46e7-861e-ca89349eec97 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.503356] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 759.503356] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52cf473a-2c0b-3800-f2cc-ba5ea92ee5ba" [ 759.503356] env[62522]: _type = "Task" [ 759.503356] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.509713] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52cf473a-2c0b-3800-f2cc-ba5ea92ee5ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.583924] env[62522]: DEBUG nova.compute.manager [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 759.643627] env[62522]: DEBUG nova.compute.utils [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 759.652372] env[62522]: DEBUG nova.compute.manager [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 759.652372] env[62522]: DEBUG nova.network.neutron [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 759.672975] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415311, 'name': CreateVM_Task} progress is 25%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.758486] env[62522]: DEBUG nova.policy [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bdb614560a904ce5ba60a6a860ec3564', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ff68a180abec48d7bcf3f13e73cfed2e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 760.017610] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52cf473a-2c0b-3800-f2cc-ba5ea92ee5ba, 'name': SearchDatastore_Task, 'duration_secs': 0.008743} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.018013] env[62522]: DEBUG oslo_concurrency.lockutils [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 760.018145] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] ebca687d-4de7-4fd6-99fb-b4f0154abe9c/ebca687d-4de7-4fd6-99fb-b4f0154abe9c.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 760.018448] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-432b23ee-a233-407c-820c-c00d81509872 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.029536] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 760.029536] env[62522]: value = "task-2415312" [ 760.029536] env[62522]: _type = "Task" [ 760.029536] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.039686] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415312, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.104157] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.153787] env[62522]: DEBUG nova.compute.manager [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 760.167929] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415311, 'name': CreateVM_Task} progress is 25%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.199683] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6deb355c-d171-49ec-a9f8-fe868a559ed0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.207362] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c234acd-e60f-4379-8880-b092c706733e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.240125] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da94947c-44b1-44a9-82f1-5f2b4d3c6319 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.247383] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4646804a-045b-4867-bce4-c26dde88fce0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.262318] env[62522]: DEBUG nova.compute.provider_tree [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.407099] env[62522]: DEBUG nova.network.neutron [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Successfully created port: 290fda08-0629-455f-b80b-237754fd93f2 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 760.542496] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415312, 'name': 
CopyVirtualDisk_Task} progress is 51%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.671957] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415311, 'name': CreateVM_Task} progress is 25%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.737304] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d00b824-541f-434a-a79a-831353ee687a tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Acquiring lock "bf2ccaeb-610a-437b-be94-d3caefbe15c5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.737578] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d00b824-541f-434a-a79a-831353ee687a tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Lock "bf2ccaeb-610a-437b-be94-d3caefbe15c5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.765053] env[62522]: DEBUG nova.scheduler.client.report [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 761.041884] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415312, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.650199} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.042188] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] ebca687d-4de7-4fd6-99fb-b4f0154abe9c/ebca687d-4de7-4fd6-99fb-b4f0154abe9c.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 761.043347] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 761.043598] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e00215b5-cafc-45da-b242-9f8d65bff559 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.050603] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 761.050603] env[62522]: value = "task-2415313" [ 761.050603] env[62522]: _type = "Task" [ 761.050603] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.058817] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415313, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.166923] env[62522]: DEBUG nova.compute.manager [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 761.173113] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415311, 'name': CreateVM_Task, 'duration_secs': 2.30185} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.173287] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 761.173686] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.173848] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.174199] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 761.174458] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0227f3b1-2258-46de-8ddb-21066c0fa485 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.178959] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Waiting for the task: (returnval){ [ 761.178959] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5292fa41-bb38-51bf-11d9-b0ae0774f9ff" [ 761.178959] env[62522]: _type = "Task" [ 761.178959] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.188333] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5292fa41-bb38-51bf-11d9-b0ae0774f9ff, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.194177] env[62522]: DEBUG nova.virt.hardware [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 761.194446] env[62522]: DEBUG nova.virt.hardware [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 761.194651] env[62522]: DEBUG nova.virt.hardware [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 761.194875] env[62522]: DEBUG nova.virt.hardware [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 761.195086] env[62522]: DEBUG nova.virt.hardware [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 761.195292] env[62522]: DEBUG nova.virt.hardware [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 761.195551] env[62522]: DEBUG nova.virt.hardware [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 761.195752] env[62522]: DEBUG nova.virt.hardware [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 761.195960] env[62522]: DEBUG nova.virt.hardware [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 761.196183] env[62522]: DEBUG nova.virt.hardware [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 761.196401] env[62522]: DEBUG nova.virt.hardware [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 761.197205] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d72bb0-f56a-49b4-a48b-e7eac60555d7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.204282] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f109d6f-b9ff-4756-b5b7-38bb480951df {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.243594] env[62522]: DEBUG nova.compute.utils [None req-6d00b824-541f-434a-a79a-831353ee687a tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 761.270725] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.131s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.273075] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.316s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.274598] env[62522]: INFO nova.compute.claims [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 761.299767] env[62522]: INFO nova.scheduler.client.report [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Deleted allocations for instance 8461f823-e48a-42f0-8863-44177565b82d [ 761.317650] env[62522]: DEBUG nova.compute.manager [req-ec8f7775-22bc-460f-926e-4e9d594aac47 req-097ea2c3-fda8-4119-b799-b904f64d7efd service 
nova] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Received event network-changed-648bb769-184a-43cb-a66e-9b36814e4e4a {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 761.317956] env[62522]: DEBUG nova.compute.manager [req-ec8f7775-22bc-460f-926e-4e9d594aac47 req-097ea2c3-fda8-4119-b799-b904f64d7efd service nova] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Refreshing instance network info cache due to event network-changed-648bb769-184a-43cb-a66e-9b36814e4e4a. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 761.318111] env[62522]: DEBUG oslo_concurrency.lockutils [req-ec8f7775-22bc-460f-926e-4e9d594aac47 req-097ea2c3-fda8-4119-b799-b904f64d7efd service nova] Acquiring lock "refresh_cache-e813e7da-fd2c-4f10-b2f3-1e2b5c153a19" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.318260] env[62522]: DEBUG oslo_concurrency.lockutils [req-ec8f7775-22bc-460f-926e-4e9d594aac47 req-097ea2c3-fda8-4119-b799-b904f64d7efd service nova] Acquired lock "refresh_cache-e813e7da-fd2c-4f10-b2f3-1e2b5c153a19" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.318481] env[62522]: DEBUG nova.network.neutron [req-ec8f7775-22bc-460f-926e-4e9d594aac47 req-097ea2c3-fda8-4119-b799-b904f64d7efd service nova] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Refreshing network info cache for port 648bb769-184a-43cb-a66e-9b36814e4e4a {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 761.561737] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415313, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066154} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.561989] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 761.562797] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b691ca6f-11f7-4de3-ac9c-89fb8a63d38e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.587203] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] ebca687d-4de7-4fd6-99fb-b4f0154abe9c/ebca687d-4de7-4fd6-99fb-b4f0154abe9c.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 761.587893] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7eb51df5-4caa-43ec-883b-ccdb3f92d182 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.607832] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 761.607832] env[62522]: value = "task-2415314" [ 761.607832] env[62522]: _type = "Task" [ 761.607832] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.615952] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415314, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.689775] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5292fa41-bb38-51bf-11d9-b0ae0774f9ff, 'name': SearchDatastore_Task, 'duration_secs': 0.009584} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.690257] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.690327] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 761.690554] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.690716] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.690886] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 761.691180] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1472384-99e0-4308-9231-43279173d090 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.699350] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 761.699537] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 761.700311] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa9191aa-ccbb-41f5-a4f1-9a3a9d9f553d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.705980] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Waiting for the task: (returnval){ [ 761.705980] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52701e1b-ffe4-3176-4569-d30e456d7d61" [ 761.705980] env[62522]: _type = "Task" [ 761.705980] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.713510] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52701e1b-ffe4-3176-4569-d30e456d7d61, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.746411] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d00b824-541f-434a-a79a-831353ee687a tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Lock "bf2ccaeb-610a-437b-be94-d3caefbe15c5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.811892] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c8c71892-5106-4ddd-a273-d873f643721b tempest-ImagesNegativeTestJSON-793705757 tempest-ImagesNegativeTestJSON-793705757-project-member] Lock "8461f823-e48a-42f0-8863-44177565b82d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.956s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.119082] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415314, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.215600] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52701e1b-ffe4-3176-4569-d30e456d7d61, 'name': SearchDatastore_Task, 'duration_secs': 0.010008} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.216450] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4043bd6f-83f1-4cf3-9463-04b27d140c2c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.224059] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Waiting for the task: (returnval){ [ 762.224059] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b390db-f0c1-1bac-1338-8a3e89fbf2f0" [ 762.224059] env[62522]: _type = "Task" [ 762.224059] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.232934] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b390db-f0c1-1bac-1338-8a3e89fbf2f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.555056] env[62522]: DEBUG nova.network.neutron [req-ec8f7775-22bc-460f-926e-4e9d594aac47 req-097ea2c3-fda8-4119-b799-b904f64d7efd service nova] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Updated VIF entry in instance network info cache for port 648bb769-184a-43cb-a66e-9b36814e4e4a. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 762.555056] env[62522]: DEBUG nova.network.neutron [req-ec8f7775-22bc-460f-926e-4e9d594aac47 req-097ea2c3-fda8-4119-b799-b904f64d7efd service nova] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Updating instance_info_cache with network_info: [{"id": "648bb769-184a-43cb-a66e-9b36814e4e4a", "address": "fa:16:3e:c0:ce:1b", "network": {"id": "6be1ee4a-ede6-491a-8cf1-6f237b02c850", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1134150376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce263d67988b4448b181b122b9270155", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f49a7d-c6e5-404f-b71a-91d8c070cd18", "external-id": "nsx-vlan-transportzone-120", "segmentation_id": 120, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap648bb769-18", "ovs_interfaceid": "648bb769-184a-43cb-a66e-9b36814e4e4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.620662] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415314, 'name': ReconfigVM_Task, 
'duration_secs': 0.638891} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.620948] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Reconfigured VM instance instance-00000023 to attach disk [datastore1] ebca687d-4de7-4fd6-99fb-b4f0154abe9c/ebca687d-4de7-4fd6-99fb-b4f0154abe9c.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 762.621693] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c50ec550-ee30-4f38-8d8b-f82c26633e29 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.628362] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 762.628362] env[62522]: value = "task-2415315" [ 762.628362] env[62522]: _type = "Task" [ 762.628362] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.639406] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415315, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.695807] env[62522]: DEBUG nova.network.neutron [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Successfully updated port: 290fda08-0629-455f-b80b-237754fd93f2 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 762.740103] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b390db-f0c1-1bac-1338-8a3e89fbf2f0, 'name': SearchDatastore_Task, 'duration_secs': 0.010109} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.742779] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.743700] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 63a7f41d-13cc-420a-96d3-a3f102869137/63a7f41d-13cc-420a-96d3-a3f102869137.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 762.744211] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2bd9f68a-9f7c-45f1-8156-d8f93d3c63dc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.751734] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Waiting for the task: (returnval){ [ 762.751734] env[62522]: value = "task-2415316" [ 762.751734] env[62522]: _type = "Task" [ 762.751734] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.764738] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415316, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.815023] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-310f388d-7de1-4854-9871-1ccdae7c558d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.820894] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8694def-4ccb-4d04-acb7-70f067d402ba {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.857134] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d00b824-541f-434a-a79a-831353ee687a tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Acquiring lock "bf2ccaeb-610a-437b-be94-d3caefbe15c5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 762.857134] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d00b824-541f-434a-a79a-831353ee687a tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Lock "bf2ccaeb-610a-437b-be94-d3caefbe15c5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 762.857134] env[62522]: INFO nova.compute.manager [None req-6d00b824-541f-434a-a79a-831353ee687a tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Attaching volume 576c8720-9070-4d05-af30-8ea24f60700e to /dev/sdb [ 762.859089] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0207f2a2-6be7-47e2-9617-294ac833071b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.866671] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92d62260-c0d8-4b60-a667-807df2835dc8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.883541] env[62522]: DEBUG nova.compute.provider_tree [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 762.897968] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4f13202-d0f0-4cc5-bf42-a2d72d640bb6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.905199] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4113dbf-cfe9-415e-a95a-3da166453b78 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.919722] env[62522]: DEBUG nova.virt.block_device [None req-6d00b824-541f-434a-a79a-831353ee687a tempest-VolumesAssistedSnapshotsTest-1201329058 
tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Updating existing volume attachment record: d648fdaf-b9d0-415e-9587-9fafa9cf2836 {{(pid=62522) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 763.059343] env[62522]: DEBUG oslo_concurrency.lockutils [req-ec8f7775-22bc-460f-926e-4e9d594aac47 req-097ea2c3-fda8-4119-b799-b904f64d7efd service nova] Releasing lock "refresh_cache-e813e7da-fd2c-4f10-b2f3-1e2b5c153a19" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.143250] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415315, 'name': Rename_Task, 'duration_secs': 0.251928} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.143741] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 763.144391] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c3cff59b-fafc-4d74-aa2a-3680e6cdeb3c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.154753] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 763.154753] env[62522]: value = "task-2415317" [ 763.154753] env[62522]: _type = "Task" [ 763.154753] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.166357] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415317, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.203283] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "refresh_cache-d30397b4-c617-4717-b624-ad1b06331bea" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.203476] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquired lock "refresh_cache-d30397b4-c617-4717-b624-ad1b06331bea" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.203677] env[62522]: DEBUG nova.network.neutron [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 763.264186] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415316, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.354849] env[62522]: DEBUG nova.compute.manager [req-33165bbf-5fbb-4623-a113-871d34d4eee7 req-bf2f689e-8aa7-44ef-b0db-28a531cd36bf service nova] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Received event network-vif-plugged-290fda08-0629-455f-b80b-237754fd93f2 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 763.356107] env[62522]: DEBUG oslo_concurrency.lockutils [req-33165bbf-5fbb-4623-a113-871d34d4eee7 req-bf2f689e-8aa7-44ef-b0db-28a531cd36bf service nova] Acquiring lock "d30397b4-c617-4717-b624-ad1b06331bea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.356416] env[62522]: DEBUG oslo_concurrency.lockutils [req-33165bbf-5fbb-4623-a113-871d34d4eee7 req-bf2f689e-8aa7-44ef-b0db-28a531cd36bf service nova] Lock "d30397b4-c617-4717-b624-ad1b06331bea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.356762] env[62522]: DEBUG oslo_concurrency.lockutils [req-33165bbf-5fbb-4623-a113-871d34d4eee7 req-bf2f689e-8aa7-44ef-b0db-28a531cd36bf service nova] Lock "d30397b4-c617-4717-b624-ad1b06331bea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.357066] env[62522]: DEBUG nova.compute.manager [req-33165bbf-5fbb-4623-a113-871d34d4eee7 req-bf2f689e-8aa7-44ef-b0db-28a531cd36bf service nova] [instance: d30397b4-c617-4717-b624-ad1b06331bea] No waiting events found dispatching network-vif-plugged-290fda08-0629-455f-b80b-237754fd93f2 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 
763.357313] env[62522]: WARNING nova.compute.manager [req-33165bbf-5fbb-4623-a113-871d34d4eee7 req-bf2f689e-8aa7-44ef-b0db-28a531cd36bf service nova] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Received unexpected event network-vif-plugged-290fda08-0629-455f-b80b-237754fd93f2 for instance with vm_state building and task_state spawning. [ 763.357589] env[62522]: DEBUG nova.compute.manager [req-33165bbf-5fbb-4623-a113-871d34d4eee7 req-bf2f689e-8aa7-44ef-b0db-28a531cd36bf service nova] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Received event network-changed-290fda08-0629-455f-b80b-237754fd93f2 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 763.357854] env[62522]: DEBUG nova.compute.manager [req-33165bbf-5fbb-4623-a113-871d34d4eee7 req-bf2f689e-8aa7-44ef-b0db-28a531cd36bf service nova] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Refreshing instance network info cache due to event network-changed-290fda08-0629-455f-b80b-237754fd93f2. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 763.358150] env[62522]: DEBUG oslo_concurrency.lockutils [req-33165bbf-5fbb-4623-a113-871d34d4eee7 req-bf2f689e-8aa7-44ef-b0db-28a531cd36bf service nova] Acquiring lock "refresh_cache-d30397b4-c617-4717-b624-ad1b06331bea" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.387396] env[62522]: DEBUG nova.scheduler.client.report [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 763.664569] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415317, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.761591] env[62522]: DEBUG nova.network.neutron [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 763.770842] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415316, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.675419} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.771162] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 63a7f41d-13cc-420a-96d3-a3f102869137/63a7f41d-13cc-420a-96d3-a3f102869137.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 763.771646] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 763.771718] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-db6703a0-cf46-4bcb-a683-9168601db2ac {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.779053] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Waiting for the task: (returnval){ [ 763.779053] env[62522]: value = "task-2415322" [ 763.779053] env[62522]: _type = "Task" [ 763.779053] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.788319] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415322, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.895449] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.622s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.895712] env[62522]: DEBUG nova.compute.manager [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 763.900064] env[62522]: DEBUG oslo_concurrency.lockutils [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.771s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.902119] env[62522]: INFO nova.compute.claims [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 764.035558] env[62522]: DEBUG nova.network.neutron [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Updating instance_info_cache with network_info: [{"id": "290fda08-0629-455f-b80b-237754fd93f2", "address": "fa:16:3e:2b:22:54", "network": {"id": "c57ecf55-229f-499c-8cf0-0ae209127cf5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1236590821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff68a180abec48d7bcf3f13e73cfed2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap290fda08-06", "ovs_interfaceid": "290fda08-0629-455f-b80b-237754fd93f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.166277] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415317, 'name': PowerOnVM_Task} progress is 79%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.290106] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415322, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075688} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.290568] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 764.291556] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-818b4c43-e871-4114-a1db-a0372a87990a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.316020] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] 63a7f41d-13cc-420a-96d3-a3f102869137/63a7f41d-13cc-420a-96d3-a3f102869137.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 764.316310] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c29c6f8e-eba4-45f5-bd39-7267ea07baae {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.337099] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Waiting for the task: (returnval){ [ 764.337099] env[62522]: value = "task-2415323" [ 764.337099] env[62522]: _type = "Task" [ 764.337099] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.345326] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415323, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.401883] env[62522]: DEBUG nova.compute.utils [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 764.403124] env[62522]: DEBUG nova.compute.manager [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 764.403353] env[62522]: DEBUG nova.network.neutron [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 764.490814] env[62522]: DEBUG nova.policy [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bdb614560a904ce5ba60a6a860ec3564', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ff68a180abec48d7bcf3f13e73cfed2e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 764.538459] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Releasing lock "refresh_cache-d30397b4-c617-4717-b624-ad1b06331bea" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.538823] env[62522]: DEBUG nova.compute.manager [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Instance network_info: |[{"id": "290fda08-0629-455f-b80b-237754fd93f2", "address": "fa:16:3e:2b:22:54", "network": {"id": "c57ecf55-229f-499c-8cf0-0ae209127cf5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1236590821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff68a180abec48d7bcf3f13e73cfed2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap290fda08-06", "ovs_interfaceid": "290fda08-0629-455f-b80b-237754fd93f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 764.539293] env[62522]: DEBUG oslo_concurrency.lockutils [req-33165bbf-5fbb-4623-a113-871d34d4eee7 req-bf2f689e-8aa7-44ef-b0db-28a531cd36bf service nova] Acquired lock "refresh_cache-d30397b4-c617-4717-b624-ad1b06331bea" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.539518] env[62522]: DEBUG nova.network.neutron [req-33165bbf-5fbb-4623-a113-871d34d4eee7 
req-bf2f689e-8aa7-44ef-b0db-28a531cd36bf service nova] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Refreshing network info cache for port 290fda08-0629-455f-b80b-237754fd93f2 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 764.540888] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:22:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9f3a2eb5-353f-45c5-a73b-869626f4bb13', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '290fda08-0629-455f-b80b-237754fd93f2', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 764.549925] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Creating folder: Project (ff68a180abec48d7bcf3f13e73cfed2e). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 764.551312] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3156e397-e379-494d-8fea-722b70ffc045 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.572332] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Created folder: Project (ff68a180abec48d7bcf3f13e73cfed2e) in parent group-v489562. [ 764.573733] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Creating folder: Instances. Parent ref: group-v489668. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 764.573733] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8f736de9-d3bd-495e-8a90-1da7ad6b000f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.581343] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Created folder: Instances in parent group-v489668. [ 764.581585] env[62522]: DEBUG oslo.service.loopingcall [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 764.581772] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 764.581972] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-640404c9-a158-42ca-b28f-21b2947defd6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.601977] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 764.601977] env[62522]: value = "task-2415326" [ 764.601977] env[62522]: _type = "Task" [ 764.601977] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.609493] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415326, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.665585] env[62522]: DEBUG oslo_vmware.api [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415317, 'name': PowerOnVM_Task, 'duration_secs': 1.40925} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.665898] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 764.666128] env[62522]: INFO nova.compute.manager [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Took 11.07 seconds to spawn the instance on the hypervisor. [ 764.666334] env[62522]: DEBUG nova.compute.manager [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 764.667279] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c512ac-8c28-406a-baf5-b31d4721640a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.848247] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415323, 'name': ReconfigVM_Task, 'duration_secs': 0.430126} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.848247] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Reconfigured VM instance instance-00000024 to attach disk [datastore1] 63a7f41d-13cc-420a-96d3-a3f102869137/63a7f41d-13cc-420a-96d3-a3f102869137.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 764.848247] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b3dd7f3e-491f-4d8b-964f-bc5dbd6f445b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.856221] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Waiting for the task: (returnval){ [ 764.856221] env[62522]: value = "task-2415327" [ 764.856221] env[62522]: _type = "Task" [ 764.856221] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.864267] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415327, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.906359] env[62522]: DEBUG nova.compute.manager [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 765.111602] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415326, 'name': CreateVM_Task, 'duration_secs': 0.41452} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.111602] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 765.112415] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.112585] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.112898] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 765.114114] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2106e96c-38ff-4da0-835a-c7d8a1ed6068 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.122328] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 765.122328] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52382db7-285a-0e0f-6b6f-b2bc45c1db8c" [ 765.122328] env[62522]: _type = "Task" [ 765.122328] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.130470] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52382db7-285a-0e0f-6b6f-b2bc45c1db8c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.190191] env[62522]: INFO nova.compute.manager [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Took 43.50 seconds to build instance. 
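The span above follows the task-polling pattern that recurs throughout this log: an asynchronous vSphere task (here CreateVM_Task, earlier PowerOnVM_Task and ReconfigVM_Task) is invoked, and the caller repeatedly polls it, logging "progress is N%" until a "completed successfully" entry marks the terminal state. Below is a minimal, self-contained sketch of that poll-until-complete loop for illustration only; it is not the oslo.vmware implementation, and the names TaskInfo, fetch_task_info, poll_interval, and timeout are hypothetical stand-ins introduced solely for this sketch.

    # Illustrative sketch only: a generic poll-until-complete loop resembling the
    # wait_for_task/_poll_task sequence visible in the log above. All names here
    # (TaskInfo, fetch_task_info) are hypothetical, not the oslo.vmware API.
    import time
    from dataclasses import dataclass
    from typing import Callable, Optional


    @dataclass
    class TaskInfo:
        state: str               # e.g. "running", "success", "error"
        progress: int = 0        # percent complete, as logged ("progress is N%")
        error: Optional[str] = None


    def wait_for_task(task_id: str,
                      fetch_task_info: Callable[[str], TaskInfo],
                      poll_interval: float = 0.5,
                      timeout: float = 300.0) -> TaskInfo:
        """Poll a task until it reaches a terminal state, mirroring the
        'progress is N%' ... 'completed successfully' entries in the log."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info(task_id)
            print(f"Task {task_id} progress is {info.progress}%.")
            if info.state == "success":
                # Corresponds to the "completed successfully" log entries.
                return info
            if info.state == "error":
                raise RuntimeError(f"Task {task_id} failed: {info.error}")
            time.sleep(poll_interval)
        raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")

The same loop shape accounts for the repeated entries for task-2415326 (CreateVM_Task), task-2415327 (Rename_Task), task-2415328 (PowerOnVM_Task), and the later CopyVirtualDisk_Task and ReconfigVM_Task records: each poll logs the task id and progress, and the duration_secs field appears once the task reports success.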
[ 765.258468] env[62522]: DEBUG nova.network.neutron [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Successfully created port: fa2a1b8f-4097-4665-a83e-74536b00779c {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 765.369503] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415327, 'name': Rename_Task, 'duration_secs': 0.185446} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.371622] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 765.371850] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f20f1445-c5fd-4552-84fb-15b65a34a4bf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.380930] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Waiting for the task: (returnval){ [ 765.380930] env[62522]: value = "task-2415328" [ 765.380930] env[62522]: _type = "Task" [ 765.380930] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.389531] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415328, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.484240] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9dfcb70-d37c-4091-9c3d-5d090d39384d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.494773] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e88ea9e-22d0-4708-b8bc-46006cac517d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.530888] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc250f8c-bc37-4cf0-9a05-a5606cd2ff00 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.539328] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe59a017-8c28-4ba2-8650-1f0dc832ffbc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.554260] env[62522]: DEBUG nova.compute.provider_tree [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 765.632932] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52382db7-285a-0e0f-6b6f-b2bc45c1db8c, 'name': SearchDatastore_Task, 'duration_secs': 0.025819} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.633277] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.633512] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 765.635755] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.635755] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.635755] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 765.635755] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b763f11f-d83e-4dbe-b524-12ac0de4df9a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.645322] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 765.645510] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 765.647272] env[62522]: DEBUG nova.network.neutron [req-33165bbf-5fbb-4623-a113-871d34d4eee7 req-bf2f689e-8aa7-44ef-b0db-28a531cd36bf service nova] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Updated VIF entry in instance network info cache for port 290fda08-0629-455f-b80b-237754fd93f2. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 765.647272] env[62522]: DEBUG nova.network.neutron [req-33165bbf-5fbb-4623-a113-871d34d4eee7 req-bf2f689e-8aa7-44ef-b0db-28a531cd36bf service nova] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Updating instance_info_cache with network_info: [{"id": "290fda08-0629-455f-b80b-237754fd93f2", "address": "fa:16:3e:2b:22:54", "network": {"id": "c57ecf55-229f-499c-8cf0-0ae209127cf5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1236590821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff68a180abec48d7bcf3f13e73cfed2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap290fda08-06", "ovs_interfaceid": "290fda08-0629-455f-b80b-237754fd93f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.648334] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb8830e2-7414-445f-bc02-6cec1a8385bf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.654067] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 765.654067] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52125ac8-5523-b3a0-e5cd-805f644e6e60" [ 765.654067] env[62522]: _type = "Task" [ 765.654067] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.661550] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52125ac8-5523-b3a0-e5cd-805f644e6e60, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.692459] env[62522]: DEBUG oslo_concurrency.lockutils [None req-718e0f80-c3fb-4a6a-97ea-cde0aaef64b3 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "ebca687d-4de7-4fd6-99fb-b4f0154abe9c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.681s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.893356] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415328, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.920168] env[62522]: DEBUG nova.compute.manager [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 765.948589] env[62522]: DEBUG nova.virt.hardware [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 765.948829] env[62522]: DEBUG nova.virt.hardware [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 765.948993] env[62522]: DEBUG nova.virt.hardware [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 765.949203] env[62522]: DEBUG nova.virt.hardware [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 765.949352] env[62522]: DEBUG nova.virt.hardware [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Image pref 0:0:0 
{{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 765.949504] env[62522]: DEBUG nova.virt.hardware [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 765.949774] env[62522]: DEBUG nova.virt.hardware [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 765.950177] env[62522]: DEBUG nova.virt.hardware [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 765.950177] env[62522]: DEBUG nova.virt.hardware [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 765.950286] env[62522]: DEBUG nova.virt.hardware [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 765.950442] env[62522]: DEBUG nova.virt.hardware [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 765.951405] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-289e8356-a368-4faf-8487-8877f341d6f8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.959310] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16423280-e0b9-477e-8ca9-56a2bcf98f49 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.058562] env[62522]: DEBUG nova.scheduler.client.report [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 766.153037] env[62522]: DEBUG oslo_concurrency.lockutils [req-33165bbf-5fbb-4623-a113-871d34d4eee7 req-bf2f689e-8aa7-44ef-b0db-28a531cd36bf service nova] Releasing lock "refresh_cache-d30397b4-c617-4717-b624-ad1b06331bea" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.171066] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52125ac8-5523-b3a0-e5cd-805f644e6e60, 'name': SearchDatastore_Task, 'duration_secs': 0.009317} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.173395] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-254b6c2e-cf2f-4c93-a12a-c2940d490eb7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.177969] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 766.177969] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5287a77e-872e-deaf-1c5c-6e704a624567" [ 766.177969] env[62522]: _type = "Task" [ 766.177969] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.186259] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5287a77e-872e-deaf-1c5c-6e704a624567, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.194848] env[62522]: DEBUG nova.compute.manager [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 766.391796] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415328, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.564032] env[62522]: DEBUG oslo_concurrency.lockutils [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.664s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.564436] env[62522]: DEBUG nova.compute.manager [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 766.568951] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.842s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.568951] env[62522]: DEBUG nova.objects.instance [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Lazy-loading 'resources' on Instance uuid 87a90c88-6e0a-4051-8978-b2f9c5a876ca {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 766.688833] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5287a77e-872e-deaf-1c5c-6e704a624567, 'name': SearchDatastore_Task, 'duration_secs': 0.009442} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.688833] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.688833] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] d30397b4-c617-4717-b624-ad1b06331bea/d30397b4-c617-4717-b624-ad1b06331bea.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 766.689057] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2e9c0587-a09c-4915-95f5-3c0fec06f199 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.703613] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 766.703613] env[62522]: value = "task-2415330" [ 766.703613] env[62522]: _type = "Task" [ 766.703613] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.724639] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415330, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.730307] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.895777] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415328, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.072026] env[62522]: DEBUG nova.compute.utils [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 767.072026] env[62522]: DEBUG nova.compute.manager [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 767.072026] env[62522]: DEBUG nova.network.neutron [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 767.151815] env[62522]: DEBUG nova.policy [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bdb614560a904ce5ba60a6a860ec3564', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ff68a180abec48d7bcf3f13e73cfed2e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 767.179179] env[62522]: DEBUG nova.compute.manager [req-a4e46246-8e31-4151-91ee-94e2cbff93bc req-227d7369-918a-4aa1-bc85-7f289b051903 service nova] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Received event network-vif-plugged-fa2a1b8f-4097-4665-a83e-74536b00779c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 767.179465] env[62522]: DEBUG oslo_concurrency.lockutils [req-a4e46246-8e31-4151-91ee-94e2cbff93bc req-227d7369-918a-4aa1-bc85-7f289b051903 service nova] Acquiring lock "0d36b844-554e-46e7-9cf9-ef04b67e8898-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.179671] env[62522]: DEBUG oslo_concurrency.lockutils [req-a4e46246-8e31-4151-91ee-94e2cbff93bc req-227d7369-918a-4aa1-bc85-7f289b051903 service nova] Lock "0d36b844-554e-46e7-9cf9-ef04b67e8898-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.179917] env[62522]: DEBUG oslo_concurrency.lockutils [req-a4e46246-8e31-4151-91ee-94e2cbff93bc req-227d7369-918a-4aa1-bc85-7f289b051903 service nova] Lock "0d36b844-554e-46e7-9cf9-ef04b67e8898-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.180014] env[62522]: DEBUG nova.compute.manager 
[req-a4e46246-8e31-4151-91ee-94e2cbff93bc req-227d7369-918a-4aa1-bc85-7f289b051903 service nova] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] No waiting events found dispatching network-vif-plugged-fa2a1b8f-4097-4665-a83e-74536b00779c {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 767.180208] env[62522]: WARNING nova.compute.manager [req-a4e46246-8e31-4151-91ee-94e2cbff93bc req-227d7369-918a-4aa1-bc85-7f289b051903 service nova] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Received unexpected event network-vif-plugged-fa2a1b8f-4097-4665-a83e-74536b00779c for instance with vm_state building and task_state spawning. [ 767.226025] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415330, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.397946] env[62522]: DEBUG oslo_vmware.api [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415328, 'name': PowerOnVM_Task, 'duration_secs': 1.780559} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.401066] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 767.401345] env[62522]: INFO nova.compute.manager [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Took 8.87 seconds to spawn the instance on the hypervisor. [ 767.401571] env[62522]: DEBUG nova.compute.manager [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 767.406021] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b464247-cb6a-42e7-a23f-02d876b26522 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.466890] env[62522]: DEBUG nova.network.neutron [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Successfully updated port: fa2a1b8f-4097-4665-a83e-74536b00779c {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 767.583271] env[62522]: DEBUG nova.compute.manager [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 767.727529] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415330, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.532218} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.729251] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] d30397b4-c617-4717-b624-ad1b06331bea/d30397b4-c617-4717-b624-ad1b06331bea.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 767.729481] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 767.732036] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0356a08a-c7fb-4812-94d7-5ebd7b579347 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.734612] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "ed7220fa-fee9-4715-acbb-236682c6729e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.734801] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "ed7220fa-fee9-4715-acbb-236682c6729e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.736507] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab7dd2d-bd84-453d-8d92-1876513cc4e1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.746140] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aea10b1-a87b-4472-bc62-cda8b3498cb6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.749870] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 767.749870] env[62522]: value = "task-2415331" [ 767.749870] env[62522]: _type = "Task" [ 767.749870] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.779475] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8832eb34-4d16-4e39-a1d4-ae06562520a2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.785296] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415331, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.789951] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-965488e2-da38-4887-84dd-f2079353e241 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.804968] env[62522]: DEBUG nova.compute.provider_tree [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 767.811738] env[62522]: DEBUG nova.network.neutron [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Successfully created port: bd57fd68-6d70-40a6-beb5-73e810ccf037 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 767.922763] env[62522]: INFO nova.compute.manager [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Took 43.82 seconds to build instance. [ 767.972230] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "refresh_cache-0d36b844-554e-46e7-9cf9-ef04b67e8898" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 767.973089] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquired lock "refresh_cache-0d36b844-554e-46e7-9cf9-ef04b67e8898" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.973089] env[62522]: DEBUG nova.network.neutron [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 767.987958] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d00b824-541f-434a-a79a-831353ee687a tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Volume attach. 
Driver type: vmdk {{(pid=62522) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 767.989031] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d00b824-541f-434a-a79a-831353ee687a tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489667', 'volume_id': '576c8720-9070-4d05-af30-8ea24f60700e', 'name': 'volume-576c8720-9070-4d05-af30-8ea24f60700e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'bf2ccaeb-610a-437b-be94-d3caefbe15c5', 'attached_at': '', 'detached_at': '', 'volume_id': '576c8720-9070-4d05-af30-8ea24f60700e', 'serial': '576c8720-9070-4d05-af30-8ea24f60700e'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 767.989117] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66407ada-e945-4175-8179-bcf58515e1a0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.008155] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e01f163-3d7a-4af0-88f6-4a9ea35897af {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.034138] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d00b824-541f-434a-a79a-831353ee687a tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] volume-576c8720-9070-4d05-af30-8ea24f60700e/volume-576c8720-9070-4d05-af30-8ea24f60700e.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 768.034455] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b0d250c-b041-43ad-822d-efb05f81c1a1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.053181] env[62522]: DEBUG oslo_vmware.api [None req-6d00b824-541f-434a-a79a-831353ee687a tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Waiting for the task: (returnval){ [ 768.053181] env[62522]: value = "task-2415332" [ 768.053181] env[62522]: _type = "Task" [ 768.053181] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.061826] env[62522]: DEBUG oslo_vmware.api [None req-6d00b824-541f-434a-a79a-831353ee687a tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Task: {'id': task-2415332, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.260802] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415331, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064514} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.261253] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 768.262138] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80dd4987-05f2-41b0-ae10-e86b407510a6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.292416] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] d30397b4-c617-4717-b624-ad1b06331bea/d30397b4-c617-4717-b624-ad1b06331bea.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 768.293631] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-654f22ab-5684-4a93-b44f-a9c9aa90a0f8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.313576] env[62522]: DEBUG nova.scheduler.client.report [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 768.323244] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 768.323244] env[62522]: value = "task-2415333" [ 768.323244] env[62522]: _type = "Task" [ 768.323244] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.332836] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415333, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.424717] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9c3d01b7-1aeb-477b-9df9-69ce05a47ab3 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Lock "63a7f41d-13cc-420a-96d3-a3f102869137" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.005s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.530920] env[62522]: DEBUG nova.network.neutron [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 768.564300] env[62522]: DEBUG oslo_vmware.api [None req-6d00b824-541f-434a-a79a-831353ee687a tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Task: {'id': task-2415332, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.596027] env[62522]: DEBUG nova.compute.manager [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 768.625408] env[62522]: DEBUG nova.virt.hardware [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:04Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 768.625655] env[62522]: DEBUG nova.virt.hardware [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 768.625810] env[62522]: DEBUG nova.virt.hardware [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 768.626039] env[62522]: DEBUG nova.virt.hardware [None req-089c87b1-23d9-49ec-96b6-6618f282362e 
tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 768.626176] env[62522]: DEBUG nova.virt.hardware [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 768.626324] env[62522]: DEBUG nova.virt.hardware [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 768.626528] env[62522]: DEBUG nova.virt.hardware [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 768.627292] env[62522]: DEBUG nova.virt.hardware [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 768.627578] env[62522]: DEBUG nova.virt.hardware [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 768.627795] env[62522]: DEBUG nova.virt.hardware [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 768.628181] env[62522]: DEBUG nova.virt.hardware [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 768.629109] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8be61bb-7513-46a3-b740-189c18703225 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.637363] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1893347-1900-4050-a862-5a0ac3971bc8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.824225] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: 
held 2.252s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.825054] env[62522]: DEBUG oslo_concurrency.lockutils [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.839s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.830779] env[62522]: INFO nova.compute.claims [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 768.836094] env[62522]: DEBUG nova.network.neutron [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Updating instance_info_cache with network_info: [{"id": "fa2a1b8f-4097-4665-a83e-74536b00779c", "address": "fa:16:3e:56:51:92", "network": {"id": "c57ecf55-229f-499c-8cf0-0ae209127cf5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1236590821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff68a180abec48d7bcf3f13e73cfed2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa2a1b8f-40", "ovs_interfaceid": "fa2a1b8f-4097-4665-a83e-74536b00779c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.854269] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415333, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.862011] env[62522]: INFO nova.scheduler.client.report [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Deleted allocations for instance 87a90c88-6e0a-4051-8978-b2f9c5a876ca [ 768.929587] env[62522]: DEBUG nova.compute.manager [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 769.065814] env[62522]: DEBUG oslo_vmware.api [None req-6d00b824-541f-434a-a79a-831353ee687a tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Task: {'id': task-2415332, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.166148] env[62522]: INFO nova.compute.manager [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Rebuilding instance [ 769.210663] env[62522]: DEBUG nova.compute.manager [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 769.211696] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd758a7-4cd7-4c59-adbb-fd9ffb61c3d0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.232552] env[62522]: DEBUG nova.compute.manager [req-43244dc8-535a-4e28-adaf-47a5903db7f6 req-e4cc132d-8e1e-4874-82a2-eb80279ba4f6 service nova] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Received event network-changed-fa2a1b8f-4097-4665-a83e-74536b00779c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 769.232997] env[62522]: DEBUG nova.compute.manager [req-43244dc8-535a-4e28-adaf-47a5903db7f6 req-e4cc132d-8e1e-4874-82a2-eb80279ba4f6 service nova] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Refreshing instance network info cache due to event network-changed-fa2a1b8f-4097-4665-a83e-74536b00779c. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 769.232997] env[62522]: DEBUG oslo_concurrency.lockutils [req-43244dc8-535a-4e28-adaf-47a5903db7f6 req-e4cc132d-8e1e-4874-82a2-eb80279ba4f6 service nova] Acquiring lock "refresh_cache-0d36b844-554e-46e7-9cf9-ef04b67e8898" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.347614] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Releasing lock "refresh_cache-0d36b844-554e-46e7-9cf9-ef04b67e8898" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 769.348706] env[62522]: DEBUG nova.compute.manager [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Instance network_info: |[{"id": "fa2a1b8f-4097-4665-a83e-74536b00779c", "address": "fa:16:3e:56:51:92", "network": {"id": "c57ecf55-229f-499c-8cf0-0ae209127cf5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1236590821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff68a180abec48d7bcf3f13e73cfed2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa2a1b8f-40", "ovs_interfaceid": "fa2a1b8f-4097-4665-a83e-74536b00779c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 769.348706] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415333, 'name': ReconfigVM_Task, 'duration_secs': 0.604699} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.348872] env[62522]: DEBUG oslo_concurrency.lockutils [req-43244dc8-535a-4e28-adaf-47a5903db7f6 req-e4cc132d-8e1e-4874-82a2-eb80279ba4f6 service nova] Acquired lock "refresh_cache-0d36b844-554e-46e7-9cf9-ef04b67e8898" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.349046] env[62522]: DEBUG nova.network.neutron [req-43244dc8-535a-4e28-adaf-47a5903db7f6 req-e4cc132d-8e1e-4874-82a2-eb80279ba4f6 service nova] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Refreshing network info cache for port fa2a1b8f-4097-4665-a83e-74536b00779c {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 769.350096] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:51:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9f3a2eb5-353f-45c5-a73b-869626f4bb13', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fa2a1b8f-4097-4665-a83e-74536b00779c', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 769.357266] env[62522]: DEBUG oslo.service.loopingcall [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 769.357483] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Reconfigured VM instance instance-00000025 to attach disk [datastore1] d30397b4-c617-4717-b624-ad1b06331bea/d30397b4-c617-4717-b624-ad1b06331bea.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 769.359130] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 769.359253] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-01b87d51-0af6-442b-943c-25efc20f81ec {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.361039] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ec10253-ab4e-4032-9bfe-c71b848fb72b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.377829] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5b983803-e996-4d08-9a40-18bbcf319047 tempest-DeleteServersAdminTestJSON-1202237971 tempest-DeleteServersAdminTestJSON-1202237971-project-member] Lock "87a90c88-6e0a-4051-8978-b2f9c5a876ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.059s {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.383818] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 769.383818] env[62522]: value = "task-2415334" [ 769.383818] env[62522]: _type = "Task" [ 769.383818] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.385316] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 769.385316] env[62522]: value = "task-2415335" [ 769.385316] env[62522]: _type = "Task" [ 769.385316] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.396654] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415334, 'name': Rename_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.399953] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415335, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.459610] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.568832] env[62522]: DEBUG oslo_vmware.api [None req-6d00b824-541f-434a-a79a-831353ee687a tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Task: {'id': task-2415332, 'name': ReconfigVM_Task, 'duration_secs': 1.417396} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.569160] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d00b824-541f-434a-a79a-831353ee687a tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Reconfigured VM instance instance-0000000c to attach disk [datastore1] volume-576c8720-9070-4d05-af30-8ea24f60700e/volume-576c8720-9070-4d05-af30-8ea24f60700e.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 769.575753] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8acec52-4458-47d4-aa22-f7b26a96b27d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.591557] env[62522]: DEBUG oslo_vmware.api [None req-6d00b824-541f-434a-a79a-831353ee687a tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Waiting for the task: (returnval){ [ 769.591557] env[62522]: value = "task-2415336" [ 769.591557] env[62522]: _type = "Task" [ 769.591557] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.599749] env[62522]: DEBUG oslo_vmware.api [None req-6d00b824-541f-434a-a79a-831353ee687a tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Task: {'id': task-2415336, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.900972] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415334, 'name': Rename_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.907954] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415335, 'name': CreateVM_Task} progress is 25%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.110264] env[62522]: DEBUG oslo_vmware.api [None req-6d00b824-541f-434a-a79a-831353ee687a tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Task: {'id': task-2415336, 'name': ReconfigVM_Task, 'duration_secs': 0.184471} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.110264] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d00b824-541f-434a-a79a-831353ee687a tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489667', 'volume_id': '576c8720-9070-4d05-af30-8ea24f60700e', 'name': 'volume-576c8720-9070-4d05-af30-8ea24f60700e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'bf2ccaeb-610a-437b-be94-d3caefbe15c5', 'attached_at': '', 'detached_at': '', 'volume_id': '576c8720-9070-4d05-af30-8ea24f60700e', 'serial': '576c8720-9070-4d05-af30-8ea24f60700e'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 770.226315] env[62522]: DEBUG nova.network.neutron [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Successfully updated port: bd57fd68-6d70-40a6-beb5-73e810ccf037 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 770.230821] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 770.234783] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f1bfbbe1-489a-4a87-a84e-2be4dbe2ab4c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.243564] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] 
Waiting for the task: (returnval){ [ 770.243564] env[62522]: value = "task-2415337" [ 770.243564] env[62522]: _type = "Task" [ 770.243564] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.258137] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415337, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.343717] env[62522]: DEBUG nova.network.neutron [req-43244dc8-535a-4e28-adaf-47a5903db7f6 req-e4cc132d-8e1e-4874-82a2-eb80279ba4f6 service nova] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Updated VIF entry in instance network info cache for port fa2a1b8f-4097-4665-a83e-74536b00779c. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 770.343717] env[62522]: DEBUG nova.network.neutron [req-43244dc8-535a-4e28-adaf-47a5903db7f6 req-e4cc132d-8e1e-4874-82a2-eb80279ba4f6 service nova] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Updating instance_info_cache with network_info: [{"id": "fa2a1b8f-4097-4665-a83e-74536b00779c", "address": "fa:16:3e:56:51:92", "network": {"id": "c57ecf55-229f-499c-8cf0-0ae209127cf5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1236590821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff68a180abec48d7bcf3f13e73cfed2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa2a1b8f-40", "ovs_interfaceid": "fa2a1b8f-4097-4665-a83e-74536b00779c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.402103] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415334, 'name': Rename_Task, 'duration_secs': 0.605128} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.404366] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 770.404366] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-59409557-9696-45e9-a451-7d0377ab9430 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.409322] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415335, 'name': CreateVM_Task, 'duration_secs': 0.875996} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.409854] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 770.410535] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.410701] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.411016] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 770.411259] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c54076b0-aa6c-4d61-ae4e-3fa1b2638202 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.414591] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 770.414591] env[62522]: value = "task-2415338" [ 770.414591] env[62522]: _type = "Task" [ 770.414591] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.415950] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 770.415950] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f9dbb6-b623-f635-3148-1c2931863244" [ 770.415950] env[62522]: _type = "Task" [ 770.415950] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.428753] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415338, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.433035] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f9dbb6-b623-f635-3148-1c2931863244, 'name': SearchDatastore_Task, 'duration_secs': 0.009803} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.433141] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 770.433375] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 770.433602] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.433746] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.433919] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 770.434738] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3566a9a-d7c7-4dc8-bd50-29de6601d086 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.443766] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 770.443766] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 770.443881] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3146587a-13b0-4603-8487-5cf554980073 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.449330] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 770.449330] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]528ee339-2c6a-5ef6-6612-a8a9525b3bcd" [ 770.449330] env[62522]: _type = "Task" [ 770.449330] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.456582] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]528ee339-2c6a-5ef6-6612-a8a9525b3bcd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.490673] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c96049f2-81bf-483e-8c68-2a0ad4de2672 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.498641] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7959b9e1-47d4-4cf7-b5d3-3c870ce2e681 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.532268] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf9772f7-46a0-40ef-b5a9-d148cae38b87 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.540069] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd609af7-112f-49eb-ba98-a506c32e4d02 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.554477] env[62522]: DEBUG nova.compute.provider_tree [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 770.732617] env[62522]: DEBUG oslo_concurrency.lockutils [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "refresh_cache-d6935c9b-e4cc-47ed-96d5-e485d60382d6" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.732780] env[62522]: DEBUG oslo_concurrency.lockutils [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquired lock "refresh_cache-d6935c9b-e4cc-47ed-96d5-e485d60382d6" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.733119] env[62522]: DEBUG nova.network.neutron [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 770.756424] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415337, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.847064] env[62522]: DEBUG oslo_concurrency.lockutils [req-43244dc8-535a-4e28-adaf-47a5903db7f6 req-e4cc132d-8e1e-4874-82a2-eb80279ba4f6 service nova] Releasing lock "refresh_cache-0d36b844-554e-46e7-9cf9-ef04b67e8898" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 770.927922] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415338, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.960232] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]528ee339-2c6a-5ef6-6612-a8a9525b3bcd, 'name': SearchDatastore_Task, 'duration_secs': 0.009635} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.961497] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-372cbe72-f0da-4655-b6bf-12a9dfc3fecc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.967310] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 770.967310] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52dfa88e-b4d4-baf2-695f-1bf17abb6e4b" [ 770.967310] env[62522]: _type = "Task" [ 770.967310] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.977436] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52dfa88e-b4d4-baf2-695f-1bf17abb6e4b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.057529] env[62522]: DEBUG nova.scheduler.client.report [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 771.171507] env[62522]: DEBUG nova.objects.instance [None req-6d00b824-541f-434a-a79a-831353ee687a tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Lazy-loading 'flavor' on Instance uuid bf2ccaeb-610a-437b-be94-d3caefbe15c5 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 771.261370] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415337, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.282575] env[62522]: DEBUG nova.network.neutron [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 771.398020] env[62522]: DEBUG nova.compute.manager [req-cc9dabb3-8a0c-48e8-be87-27e4cf96ae9d req-cd3b3fe9-28ce-4998-8be8-896d89573856 service nova] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Received event network-vif-plugged-bd57fd68-6d70-40a6-beb5-73e810ccf037 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 771.398020] env[62522]: DEBUG oslo_concurrency.lockutils [req-cc9dabb3-8a0c-48e8-be87-27e4cf96ae9d req-cd3b3fe9-28ce-4998-8be8-896d89573856 service nova] Acquiring lock "d6935c9b-e4cc-47ed-96d5-e485d60382d6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.398020] env[62522]: DEBUG oslo_concurrency.lockutils [req-cc9dabb3-8a0c-48e8-be87-27e4cf96ae9d req-cd3b3fe9-28ce-4998-8be8-896d89573856 service nova] Lock "d6935c9b-e4cc-47ed-96d5-e485d60382d6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.398020] env[62522]: DEBUG oslo_concurrency.lockutils [req-cc9dabb3-8a0c-48e8-be87-27e4cf96ae9d req-cd3b3fe9-28ce-4998-8be8-896d89573856 service nova] Lock "d6935c9b-e4cc-47ed-96d5-e485d60382d6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.398020] env[62522]: DEBUG nova.compute.manager [req-cc9dabb3-8a0c-48e8-be87-27e4cf96ae9d req-cd3b3fe9-28ce-4998-8be8-896d89573856 service nova] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] No waiting events found dispatching network-vif-plugged-bd57fd68-6d70-40a6-beb5-73e810ccf037 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 771.398698] env[62522]: WARNING nova.compute.manager [req-cc9dabb3-8a0c-48e8-be87-27e4cf96ae9d req-cd3b3fe9-28ce-4998-8be8-896d89573856 service nova] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Received unexpected event network-vif-plugged-bd57fd68-6d70-40a6-beb5-73e810ccf037 for instance with vm_state building and task_state spawning. [ 771.398698] env[62522]: DEBUG nova.compute.manager [req-cc9dabb3-8a0c-48e8-be87-27e4cf96ae9d req-cd3b3fe9-28ce-4998-8be8-896d89573856 service nova] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Received event network-changed-bd57fd68-6d70-40a6-beb5-73e810ccf037 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 771.398756] env[62522]: DEBUG nova.compute.manager [req-cc9dabb3-8a0c-48e8-be87-27e4cf96ae9d req-cd3b3fe9-28ce-4998-8be8-896d89573856 service nova] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Refreshing instance network info cache due to event network-changed-bd57fd68-6d70-40a6-beb5-73e810ccf037. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 771.398995] env[62522]: DEBUG oslo_concurrency.lockutils [req-cc9dabb3-8a0c-48e8-be87-27e4cf96ae9d req-cd3b3fe9-28ce-4998-8be8-896d89573856 service nova] Acquiring lock "refresh_cache-d6935c9b-e4cc-47ed-96d5-e485d60382d6" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 771.430646] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415338, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.479540] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52dfa88e-b4d4-baf2-695f-1bf17abb6e4b, 'name': SearchDatastore_Task, 'duration_secs': 0.008931} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.479817] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 771.480091] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 0d36b844-554e-46e7-9cf9-ef04b67e8898/0d36b844-554e-46e7-9cf9-ef04b67e8898.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 771.480451] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-695a2398-6e97-45bc-90c2-3c85b6137360 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.489778] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 771.489778] env[62522]: value = "task-2415339" [ 771.489778] env[62522]: _type = "Task" [ 771.489778] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.500509] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415339, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.507522] env[62522]: DEBUG nova.network.neutron [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Updating instance_info_cache with network_info: [{"id": "bd57fd68-6d70-40a6-beb5-73e810ccf037", "address": "fa:16:3e:a7:ad:1f", "network": {"id": "c57ecf55-229f-499c-8cf0-0ae209127cf5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1236590821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff68a180abec48d7bcf3f13e73cfed2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd57fd68-6d", "ovs_interfaceid": "bd57fd68-6d70-40a6-beb5-73e810ccf037", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.562963] env[62522]: DEBUG oslo_concurrency.lockutils [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.738s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.563553] env[62522]: DEBUG nova.compute.manager [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 771.567812] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.746s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.570635] env[62522]: INFO nova.compute.claims [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 771.676608] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d00b824-541f-434a-a79a-831353ee687a tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Lock "bf2ccaeb-610a-437b-be94-d3caefbe15c5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.820s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.759288] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415337, 'name': PowerOffVM_Task, 'duration_secs': 1.101675} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.760014] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 771.762657] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 771.762657] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1a5c19-b67a-4214-8f50-08c5ae66e3d4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.768879] env[62522]: DEBUG oslo_concurrency.lockutils [None req-569f0b3b-f6ec-4f2c-9aa2-32eb49e84bf8 tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Acquiring lock "bf2ccaeb-610a-437b-be94-d3caefbe15c5" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.769190] env[62522]: DEBUG oslo_concurrency.lockutils [None req-569f0b3b-f6ec-4f2c-9aa2-32eb49e84bf8 tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Lock "bf2ccaeb-610a-437b-be94-d3caefbe15c5" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.778075] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 771.780547] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a439b58a-313b-4213-a08e-a4e6e3c88b8c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.806148] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 771.806387] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 771.806575] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Deleting the datastore file [datastore1] 63a7f41d-13cc-420a-96d3-a3f102869137 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 771.807219] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b841b9e6-5d81-4078-ad3f-7ca4d7738e2e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.814571] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Waiting for the task: (returnval){ [ 771.814571] env[62522]: value = "task-2415341" [ 771.814571] env[62522]: _type = "Task" [ 771.814571] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.824684] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415341, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.936312] env[62522]: DEBUG oslo_vmware.api [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415338, 'name': PowerOnVM_Task, 'duration_secs': 1.352034} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.936312] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 771.936312] env[62522]: INFO nova.compute.manager [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Took 10.77 seconds to spawn the instance on the hypervisor. [ 771.938142] env[62522]: DEBUG nova.compute.manager [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 771.939116] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9baf00dc-7dea-479e-8e1f-3b9c3225dfbc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.001973] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415339, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510432} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.002370] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 0d36b844-554e-46e7-9cf9-ef04b67e8898/0d36b844-554e-46e7-9cf9-ef04b67e8898.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 772.002657] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 772.003228] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-23616055-fa9b-43c6-b93f-e3799601fd50 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.010196] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 772.010196] env[62522]: value = "task-2415342" [ 772.010196] env[62522]: _type = "Task" [ 772.010196] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.010671] env[62522]: DEBUG oslo_concurrency.lockutils [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Releasing lock "refresh_cache-d6935c9b-e4cc-47ed-96d5-e485d60382d6" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 772.010958] env[62522]: DEBUG nova.compute.manager [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Instance network_info: |[{"id": "bd57fd68-6d70-40a6-beb5-73e810ccf037", "address": "fa:16:3e:a7:ad:1f", "network": {"id": "c57ecf55-229f-499c-8cf0-0ae209127cf5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1236590821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff68a180abec48d7bcf3f13e73cfed2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd57fd68-6d", "ovs_interfaceid": "bd57fd68-6d70-40a6-beb5-73e810ccf037", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 772.014423] env[62522]: DEBUG oslo_concurrency.lockutils [req-cc9dabb3-8a0c-48e8-be87-27e4cf96ae9d req-cd3b3fe9-28ce-4998-8be8-896d89573856 service nova] Acquired lock "refresh_cache-d6935c9b-e4cc-47ed-96d5-e485d60382d6" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.014640] env[62522]: DEBUG nova.network.neutron [req-cc9dabb3-8a0c-48e8-be87-27e4cf96ae9d req-cd3b3fe9-28ce-4998-8be8-896d89573856 service nova] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Refreshing network info cache for port bd57fd68-6d70-40a6-beb5-73e810ccf037 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 772.019337] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a7:ad:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9f3a2eb5-353f-45c5-a73b-869626f4bb13', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bd57fd68-6d70-40a6-beb5-73e810ccf037', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 772.024482] env[62522]: DEBUG oslo.service.loopingcall [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 
tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 772.024633] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 772.025295] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8941672e-caed-4d2f-a685-086bb2646b40 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.045780] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415342, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.051217] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 772.051217] env[62522]: value = "task-2415343" [ 772.051217] env[62522]: _type = "Task" [ 772.051217] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.058812] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415343, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.076582] env[62522]: DEBUG nova.compute.utils [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 772.080950] env[62522]: DEBUG nova.compute.manager [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 772.081193] env[62522]: DEBUG nova.network.neutron [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 772.137320] env[62522]: DEBUG nova.policy [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3549d85b612044969af8fda179d169ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61314d3f0b9e4c368312e714a953e549', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 772.272353] env[62522]: INFO nova.compute.manager [None req-569f0b3b-f6ec-4f2c-9aa2-32eb49e84bf8 tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Detaching volume 576c8720-9070-4d05-af30-8ea24f60700e [ 772.322873] env[62522]: INFO nova.virt.block_device [None req-569f0b3b-f6ec-4f2c-9aa2-32eb49e84bf8 tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Attempting to driver detach volume 576c8720-9070-4d05-af30-8ea24f60700e from mountpoint /dev/sdb [ 772.323330] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-569f0b3b-f6ec-4f2c-9aa2-32eb49e84bf8 tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Volume detach. 
Driver type: vmdk {{(pid=62522) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 772.323453] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-569f0b3b-f6ec-4f2c-9aa2-32eb49e84bf8 tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489667', 'volume_id': '576c8720-9070-4d05-af30-8ea24f60700e', 'name': 'volume-576c8720-9070-4d05-af30-8ea24f60700e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'bf2ccaeb-610a-437b-be94-d3caefbe15c5', 'attached_at': '', 'detached_at': '', 'volume_id': '576c8720-9070-4d05-af30-8ea24f60700e', 'serial': '576c8720-9070-4d05-af30-8ea24f60700e'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 772.324584] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfa76316-f2aa-416c-b7af-5b88909c7018 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.350749] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415341, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.234507} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.351557] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 772.351758] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 772.351941] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 772.356548] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0027c9c3-5e46-4e0c-b452-5346b743ee37 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.365824] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7266ef45-3f22-4a4a-8a0f-58d950287664 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.388859] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1d85a7-b43b-4abb-b02d-c7398eec6648 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.405331] env[62522]: DEBUG 
nova.virt.vmwareapi.volumeops [None req-569f0b3b-f6ec-4f2c-9aa2-32eb49e84bf8 tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] The volume has not been displaced from its original location: [datastore1] volume-576c8720-9070-4d05-af30-8ea24f60700e/volume-576c8720-9070-4d05-af30-8ea24f60700e.vmdk. No consolidation needed. {{(pid=62522) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 772.411833] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-569f0b3b-f6ec-4f2c-9aa2-32eb49e84bf8 tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Reconfiguring VM instance instance-0000000c to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 772.413430] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b1660e7-a2af-4ffa-9717-8a2e7ca1678d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.433026] env[62522]: DEBUG oslo_vmware.api [None req-569f0b3b-f6ec-4f2c-9aa2-32eb49e84bf8 tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Waiting for the task: (returnval){ [ 772.433026] env[62522]: value = "task-2415344" [ 772.433026] env[62522]: _type = "Task" [ 772.433026] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.442890] env[62522]: DEBUG oslo_vmware.api [None req-569f0b3b-f6ec-4f2c-9aa2-32eb49e84bf8 tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Task: {'id': task-2415344, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.461194] env[62522]: INFO nova.compute.manager [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Took 41.67 seconds to build instance. [ 772.521914] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415342, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064276} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.522559] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 772.524183] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-497b0dca-0e91-45ee-8d03-2cbc76dba1f4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.546488] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 0d36b844-554e-46e7-9cf9-ef04b67e8898/0d36b844-554e-46e7-9cf9-ef04b67e8898.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 772.548118] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-64f4d88d-3b7f-4834-9746-c6a2148e9c17 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.571320] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415343, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.572662] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 772.572662] env[62522]: value = "task-2415345" [ 772.572662] env[62522]: _type = "Task" [ 772.572662] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.580937] env[62522]: DEBUG nova.compute.manager [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 772.583505] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415345, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.667775] env[62522]: DEBUG nova.network.neutron [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Successfully created port: aa9e5bf2-d99c-44ec-8c21-4aa8866616e0 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 772.820976] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Acquiring lock "a10c4dee-4490-445a-bea2-9f8ef5425d15" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.821181] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Lock "a10c4dee-4490-445a-bea2-9f8ef5425d15" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.942320] env[62522]: DEBUG oslo_vmware.api [None req-569f0b3b-f6ec-4f2c-9aa2-32eb49e84bf8 tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Task: {'id': task-2415344, 'name': ReconfigVM_Task, 'duration_secs': 0.231313} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.944955] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-569f0b3b-f6ec-4f2c-9aa2-32eb49e84bf8 tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Reconfigured VM instance instance-0000000c to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 772.950123] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3aed0a9e-aed5-44ce-86dd-efdcfdc487f8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.964343] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b293f602-527d-48e4-b84a-138a60077f58 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "d30397b4-c617-4717-b624-ad1b06331bea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.262s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 772.972022] env[62522]: DEBUG oslo_vmware.api [None req-569f0b3b-f6ec-4f2c-9aa2-32eb49e84bf8 tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Waiting for the task: (returnval){ [ 772.972022] env[62522]: value = "task-2415346" [ 772.972022] env[62522]: _type = "Task" [ 772.972022] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.981854] env[62522]: DEBUG oslo_vmware.api [None req-569f0b3b-f6ec-4f2c-9aa2-32eb49e84bf8 tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Task: {'id': task-2415346, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.075114] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415343, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.093200] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415345, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.167980] env[62522]: DEBUG nova.network.neutron [req-cc9dabb3-8a0c-48e8-be87-27e4cf96ae9d req-cd3b3fe9-28ce-4998-8be8-896d89573856 service nova] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Updated VIF entry in instance network info cache for port bd57fd68-6d70-40a6-beb5-73e810ccf037. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 773.167980] env[62522]: DEBUG nova.network.neutron [req-cc9dabb3-8a0c-48e8-be87-27e4cf96ae9d req-cd3b3fe9-28ce-4998-8be8-896d89573856 service nova] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Updating instance_info_cache with network_info: [{"id": "bd57fd68-6d70-40a6-beb5-73e810ccf037", "address": "fa:16:3e:a7:ad:1f", "network": {"id": "c57ecf55-229f-499c-8cf0-0ae209127cf5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1236590821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff68a180abec48d7bcf3f13e73cfed2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd57fd68-6d", "ovs_interfaceid": "bd57fd68-6d70-40a6-beb5-73e810ccf037", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.189944] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6d3a86-86ef-4ffb-bee0-74854dfb88f0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.198071] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c6a7ccc-1aed-4be6-aa4a-6c3946b7bbf8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.238456] env[62522]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37f5924-bd74-4490-8867-0441c99cd1e1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.247042] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc3251c-f12a-49bb-bc92-0fb19f02db39 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.264494] env[62522]: DEBUG nova.compute.provider_tree [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 773.404192] env[62522]: DEBUG nova.virt.hardware [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 773.404473] env[62522]: DEBUG nova.virt.hardware [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 773.404473] env[62522]: DEBUG nova.virt.hardware [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 773.404601] env[62522]: DEBUG nova.virt.hardware [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 773.404749] env[62522]: DEBUG nova.virt.hardware [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 773.404898] env[62522]: DEBUG nova.virt.hardware [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 773.405125] env[62522]: DEBUG 
nova.virt.hardware [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 773.405384] env[62522]: DEBUG nova.virt.hardware [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 773.405451] env[62522]: DEBUG nova.virt.hardware [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 773.405761] env[62522]: DEBUG nova.virt.hardware [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 773.405982] env[62522]: DEBUG nova.virt.hardware [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 773.407463] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f6d8a11-b28a-4896-a9e8-1b1c767595e2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.416163] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a078aa-2a63-494e-98ce-4c7c4b1d3d8d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.430518] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Instance VIF info [] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 773.436280] env[62522]: DEBUG oslo.service.loopingcall [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 773.437023] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 773.437023] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-034cf5cf-da61-4251-8b30-a3574eebc512 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.453200] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 773.453200] env[62522]: value = "task-2415347" [ 773.453200] env[62522]: _type = "Task" [ 773.453200] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.460688] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415347, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.471358] env[62522]: DEBUG nova.compute.manager [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 773.485274] env[62522]: DEBUG oslo_vmware.api [None req-569f0b3b-f6ec-4f2c-9aa2-32eb49e84bf8 tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Task: {'id': task-2415346, 'name': ReconfigVM_Task, 'duration_secs': 0.151151} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.485274] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-569f0b3b-f6ec-4f2c-9aa2-32eb49e84bf8 tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489667', 'volume_id': '576c8720-9070-4d05-af30-8ea24f60700e', 'name': 'volume-576c8720-9070-4d05-af30-8ea24f60700e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'bf2ccaeb-610a-437b-be94-d3caefbe15c5', 'attached_at': '', 'detached_at': '', 'volume_id': '576c8720-9070-4d05-af30-8ea24f60700e', 'serial': '576c8720-9070-4d05-af30-8ea24f60700e'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 773.576057] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415343, 'name': CreateVM_Task, 'duration_secs': 1.455219} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.579561] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 773.580195] env[62522]: DEBUG oslo_concurrency.lockutils [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 773.580388] env[62522]: DEBUG oslo_concurrency.lockutils [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.580698] env[62522]: DEBUG oslo_concurrency.lockutils [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 773.581391] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3254c175-627d-4601-9ccf-bdad48aefffe {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.586882] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415345, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.590126] env[62522]: DEBUG oslo_vmware.api [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 773.590126] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5266df5b-2d6c-eb66-30f4-fb636fefe36a" [ 773.590126] env[62522]: _type = "Task" [ 773.590126] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.595674] env[62522]: DEBUG nova.compute.manager [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 773.603963] env[62522]: DEBUG oslo_vmware.api [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5266df5b-2d6c-eb66-30f4-fb636fefe36a, 'name': SearchDatastore_Task, 'duration_secs': 0.009226} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.604577] env[62522]: DEBUG oslo_concurrency.lockutils [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.604817] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 773.605063] env[62522]: DEBUG oslo_concurrency.lockutils [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 773.605215] env[62522]: DEBUG oslo_concurrency.lockutils [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.605466] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 773.605644] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c47a2874-ed73-4705-84d0-3b1d515c1da4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.614480] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 773.614827] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 773.615446] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2662d82-9ce9-4fa3-b811-f5a7a8098d8f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.621483] env[62522]: DEBUG nova.virt.hardware [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 773.621792] env[62522]: DEBUG nova.virt.hardware [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 773.622052] env[62522]: DEBUG nova.virt.hardware [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 773.622312] env[62522]: DEBUG nova.virt.hardware [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 773.622589] env[62522]: DEBUG nova.virt.hardware [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 773.622790] env[62522]: DEBUG nova.virt.hardware [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 773.623056] env[62522]: DEBUG nova.virt.hardware [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 773.624670] env[62522]: DEBUG nova.virt.hardware [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 773.624873] env[62522]: DEBUG nova.virt.hardware [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 773.625057] env[62522]: DEBUG nova.virt.hardware [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 773.625241] env[62522]: DEBUG nova.virt.hardware [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 773.626043] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f94ede49-8ea7-4df5-8b61-9425d3e2f0d6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.629837] env[62522]: DEBUG oslo_vmware.api [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 773.629837] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526cee89-4f79-00c5-d74d-1f363aaae946" [ 773.629837] env[62522]: _type = "Task" [ 773.629837] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.636777] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a096c3eb-e999-4682-b7fb-b18e19f34874 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.643775] env[62522]: DEBUG oslo_vmware.api [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526cee89-4f79-00c5-d74d-1f363aaae946, 'name': SearchDatastore_Task, 'duration_secs': 0.007805} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.644841] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eec2ebfa-08cc-4254-979a-035942629c84 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.659666] env[62522]: DEBUG oslo_vmware.api [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 773.659666] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c73413-a2a8-bdd7-e3dc-7834256d198f" [ 773.659666] env[62522]: _type = "Task" [ 773.659666] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.666807] env[62522]: DEBUG oslo_vmware.api [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c73413-a2a8-bdd7-e3dc-7834256d198f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.672433] env[62522]: DEBUG oslo_concurrency.lockutils [req-cc9dabb3-8a0c-48e8-be87-27e4cf96ae9d req-cd3b3fe9-28ce-4998-8be8-896d89573856 service nova] Releasing lock "refresh_cache-d6935c9b-e4cc-47ed-96d5-e485d60382d6" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.767833] env[62522]: DEBUG nova.scheduler.client.report [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 773.963751] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415347, 'name': CreateVM_Task} progress is 25%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.996877] env[62522]: DEBUG oslo_concurrency.lockutils [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.054639] env[62522]: DEBUG nova.objects.instance [None req-569f0b3b-f6ec-4f2c-9aa2-32eb49e84bf8 tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Lazy-loading 'flavor' on Instance uuid bf2ccaeb-610a-437b-be94-d3caefbe15c5 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 774.085513] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415345, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.169815] env[62522]: DEBUG oslo_vmware.api [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c73413-a2a8-bdd7-e3dc-7834256d198f, 'name': SearchDatastore_Task, 'duration_secs': 0.009614} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.170168] env[62522]: DEBUG oslo_concurrency.lockutils [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.170452] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] d6935c9b-e4cc-47ed-96d5-e485d60382d6/d6935c9b-e4cc-47ed-96d5-e485d60382d6.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 774.170719] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c9ae54ee-f5ee-4cc9-86f7-eeba4e0750ec {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.180849] env[62522]: DEBUG oslo_vmware.api [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 774.180849] env[62522]: value = "task-2415348" [ 774.180849] env[62522]: _type = "Task" [ 774.180849] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.190810] env[62522]: DEBUG oslo_vmware.api [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415348, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.273754] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.707s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 774.274400] env[62522]: DEBUG nova.compute.manager [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 774.277621] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.367s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.277972] env[62522]: DEBUG nova.objects.instance [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Lazy-loading 'resources' on Instance uuid 194c1dd8-3b0a-4c29-9779-65f1534121d1 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 774.466888] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415347, 'name': CreateVM_Task, 'duration_secs': 0.88662} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.467269] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 774.467788] env[62522]: DEBUG oslo_concurrency.lockutils [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.467788] env[62522]: DEBUG oslo_concurrency.lockutils [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.468183] env[62522]: DEBUG oslo_concurrency.lockutils [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 774.468518] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e7011ab-d161-442e-afc0-c8a9bc9cc883 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.474021] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Waiting for the task: (returnval){ [ 774.474021] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529dbbd6-e252-b2b1-3614-1c08097a1977" [ 774.474021] env[62522]: _type = "Task" [ 774.474021] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.482990] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529dbbd6-e252-b2b1-3614-1c08097a1977, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.588020] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415345, 'name': ReconfigVM_Task, 'duration_secs': 1.635557} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.588020] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 0d36b844-554e-46e7-9cf9-ef04b67e8898/0d36b844-554e-46e7-9cf9-ef04b67e8898.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 774.588770] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6e1f19ce-37a8-4b33-8327-43213ada95d8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.596531] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 774.596531] env[62522]: value = "task-2415349" [ 774.596531] env[62522]: _type = "Task" [ 774.596531] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.606055] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415349, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.690794] env[62522]: DEBUG oslo_vmware.api [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415348, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.781188] env[62522]: DEBUG nova.compute.utils [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 774.785558] env[62522]: DEBUG nova.compute.manager [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 774.785736] env[62522]: DEBUG nova.network.neutron [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 774.991264] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529dbbd6-e252-b2b1-3614-1c08097a1977, 'name': SearchDatastore_Task, 'duration_secs': 0.070739} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.991264] env[62522]: DEBUG oslo_concurrency.lockutils [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.991264] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 774.992092] env[62522]: DEBUG oslo_concurrency.lockutils [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.992532] env[62522]: DEBUG oslo_concurrency.lockutils [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.992997] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 774.995790] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-813f9570-3470-440b-b6cd-57b713ee8bde {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.009294] env[62522]: DEBUG nova.policy [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0b81d399f06a47bc819693b52bb74004', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': 
None, 'project_id': 'ff5da278d2be4ca983424c8291beadec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 775.009294] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 775.009294] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 775.009294] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6327edc5-d5bb-4d0c-b419-2d96e349dfdf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.014443] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Waiting for the task: (returnval){ [ 775.014443] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5282ee08-6673-3652-164b-97d748ac0eb0" [ 775.014443] env[62522]: _type = "Task" [ 775.014443] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.022940] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5282ee08-6673-3652-164b-97d748ac0eb0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.064914] env[62522]: DEBUG oslo_concurrency.lockutils [None req-569f0b3b-f6ec-4f2c-9aa2-32eb49e84bf8 tempest-VolumesAssistedSnapshotsTest-1201329058 tempest-VolumesAssistedSnapshotsTest-1201329058-project-admin] Lock "bf2ccaeb-610a-437b-be94-d3caefbe15c5" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.295s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.111933] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415349, 'name': Rename_Task, 'duration_secs': 0.284485} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.112277] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 775.112551] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ae6412c1-ba76-4444-b9a8-14fb1b72bef8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.123384] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 775.123384] env[62522]: value = "task-2415350" [ 775.123384] env[62522]: _type = "Task" [ 775.123384] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.133477] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415350, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.195143] env[62522]: DEBUG oslo_vmware.api [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415348, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520077} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.195494] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] d6935c9b-e4cc-47ed-96d5-e485d60382d6/d6935c9b-e4cc-47ed-96d5-e485d60382d6.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 775.195809] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 775.196095] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fda65292-7715-4c1e-9a1f-89d0cfc99ceb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.205079] env[62522]: DEBUG oslo_vmware.api [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 775.205079] env[62522]: value = "task-2415352" [ 775.205079] env[62522]: _type = "Task" [ 775.205079] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.213388] env[62522]: DEBUG oslo_vmware.api [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415352, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.266991] env[62522]: DEBUG nova.compute.manager [req-4dc7dddc-fe9c-4ddb-aadf-6e7f6a09057a req-a8b90fab-fcac-4847-9d5c-cc9dfb04711a service nova] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Received event network-vif-plugged-aa9e5bf2-d99c-44ec-8c21-4aa8866616e0 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 775.267234] env[62522]: DEBUG oslo_concurrency.lockutils [req-4dc7dddc-fe9c-4ddb-aadf-6e7f6a09057a req-a8b90fab-fcac-4847-9d5c-cc9dfb04711a service nova] Acquiring lock "ee1c638b-1f38-4e21-9369-4d4ff2e13d46-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 775.267439] env[62522]: DEBUG oslo_concurrency.lockutils [req-4dc7dddc-fe9c-4ddb-aadf-6e7f6a09057a req-a8b90fab-fcac-4847-9d5c-cc9dfb04711a service nova] Lock "ee1c638b-1f38-4e21-9369-4d4ff2e13d46-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.267607] env[62522]: DEBUG oslo_concurrency.lockutils [req-4dc7dddc-fe9c-4ddb-aadf-6e7f6a09057a req-a8b90fab-fcac-4847-9d5c-cc9dfb04711a service nova] Lock "ee1c638b-1f38-4e21-9369-4d4ff2e13d46-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.267778] env[62522]: DEBUG nova.compute.manager [req-4dc7dddc-fe9c-4ddb-aadf-6e7f6a09057a req-a8b90fab-fcac-4847-9d5c-cc9dfb04711a service nova] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] No waiting events found dispatching network-vif-plugged-aa9e5bf2-d99c-44ec-8c21-4aa8866616e0 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 775.267944] env[62522]: WARNING nova.compute.manager [req-4dc7dddc-fe9c-4ddb-aadf-6e7f6a09057a req-a8b90fab-fcac-4847-9d5c-cc9dfb04711a service nova] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Received unexpected event network-vif-plugged-aa9e5bf2-d99c-44ec-8c21-4aa8866616e0 for instance with vm_state building and task_state spawning. [ 775.285850] env[62522]: DEBUG nova.compute.manager [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 775.324645] env[62522]: DEBUG nova.network.neutron [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Successfully updated port: aa9e5bf2-d99c-44ec-8c21-4aa8866616e0 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 775.433519] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a28943-0cf1-4295-83b2-375fe28ac309 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.441186] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b151a92d-66ad-457b-ac92-84801afdcd40 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.474780] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-186227e5-f646-4195-8692-fafa38713576 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.483030] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c2670d-7fd7-403c-9c14-8ecd1e9405d1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.497035] env[62522]: DEBUG nova.compute.provider_tree [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 775.524911] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5282ee08-6673-3652-164b-97d748ac0eb0, 'name': SearchDatastore_Task, 'duration_secs': 0.009084} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.525619] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ee58418-dcad-40bb-87b5-588f3b25dd6d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.532408] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Waiting for the task: (returnval){ [ 775.532408] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a23641-1bf3-89f7-bca8-219ec881519d" [ 775.532408] env[62522]: _type = "Task" [ 775.532408] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.542965] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a23641-1bf3-89f7-bca8-219ec881519d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.632845] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415350, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.652051] env[62522]: DEBUG nova.network.neutron [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Successfully created port: def24237-7aea-42f2-a529-09d7bd81d5ab {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 775.715073] env[62522]: DEBUG oslo_vmware.api [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415352, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072231} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.715223] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 775.715929] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b811a79-a3fb-4d57-be3d-acb6953a0836 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.737426] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Reconfiguring VM instance instance-00000027 to attach disk [datastore2] d6935c9b-e4cc-47ed-96d5-e485d60382d6/d6935c9b-e4cc-47ed-96d5-e485d60382d6.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 775.737696] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7aa5da8e-8b7d-4bf5-980e-49278f8307ea {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.756634] env[62522]: DEBUG oslo_vmware.api [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 775.756634] env[62522]: value = "task-2415353" [ 775.756634] env[62522]: _type = "Task" [ 775.756634] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.764546] env[62522]: DEBUG oslo_vmware.api [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415353, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.829820] env[62522]: DEBUG oslo_concurrency.lockutils [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "refresh_cache-ee1c638b-1f38-4e21-9369-4d4ff2e13d46" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 775.829820] env[62522]: DEBUG oslo_concurrency.lockutils [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquired lock "refresh_cache-ee1c638b-1f38-4e21-9369-4d4ff2e13d46" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.829820] env[62522]: DEBUG nova.network.neutron [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 776.022310] env[62522]: ERROR nova.scheduler.client.report [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] [req-2039d637-2e54-43ca-99bc-5e051789fb17] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c7fa38b2-245d-4337-a012-22c1a01c0a72. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2039d637-2e54-43ca-99bc-5e051789fb17"}]} [ 776.045206] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a23641-1bf3-89f7-bca8-219ec881519d, 'name': SearchDatastore_Task, 'duration_secs': 0.011811} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.045260] env[62522]: DEBUG oslo_concurrency.lockutils [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 776.046129] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 63a7f41d-13cc-420a-96d3-a3f102869137/63a7f41d-13cc-420a-96d3-a3f102869137.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 776.046129] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a68a696b-9d63-48c8-a6ee-ab26b50cba74 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.049645] env[62522]: DEBUG nova.scheduler.client.report [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Refreshing inventories for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 776.054525] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Waiting for the task: (returnval){ [ 776.054525] env[62522]: value = "task-2415354" [ 776.054525] env[62522]: _type = "Task" [ 776.054525] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.063618] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415354, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.068531] env[62522]: DEBUG nova.scheduler.client.report [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Updating ProviderTree inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 776.068974] env[62522]: DEBUG nova.compute.provider_tree [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 776.084647] env[62522]: DEBUG nova.scheduler.client.report [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Refreshing aggregate associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, aggregates: None {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 776.105520] env[62522]: DEBUG nova.scheduler.client.report [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Refreshing trait associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 776.138856] env[62522]: DEBUG oslo_vmware.api [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415350, 'name': PowerOnVM_Task, 'duration_secs': 0.955304} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.142184] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 776.142351] env[62522]: INFO nova.compute.manager [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Took 10.22 seconds to spawn the instance on the hypervisor. [ 776.142614] env[62522]: DEBUG nova.compute.manager [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 776.143767] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98bb1160-b337-4553-bc35-a67bec528f67 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.271898] env[62522]: DEBUG oslo_vmware.api [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415353, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.298311] env[62522]: DEBUG nova.compute.manager [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 776.335809] env[62522]: DEBUG nova.virt.hardware [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 776.336083] env[62522]: DEBUG nova.virt.hardware [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 776.336249] env[62522]: DEBUG nova.virt.hardware [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 776.336455] env[62522]: DEBUG nova.virt.hardware [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 776.336613] env[62522]: DEBUG nova.virt.hardware [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 776.336791] env[62522]: DEBUG nova.virt.hardware [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 776.337077] env[62522]: DEBUG nova.virt.hardware [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 776.337286] env[62522]: DEBUG nova.virt.hardware [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 776.337570] env[62522]: DEBUG 
nova.virt.hardware [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 776.337784] env[62522]: DEBUG nova.virt.hardware [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 776.337970] env[62522]: DEBUG nova.virt.hardware [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 776.340657] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-699b3137-dc57-43c0-bd29-896fa7eac0c0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.353784] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420c9d22-a8ae-481f-9940-6029d8b5bb7a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.390110] env[62522]: DEBUG nova.network.neutron [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 776.568222] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415354, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474567} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.568502] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 63a7f41d-13cc-420a-96d3-a3f102869137/63a7f41d-13cc-420a-96d3-a3f102869137.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 776.568693] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 776.568944] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0259fd8b-79cd-43bd-8a21-783d53afde81 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.575823] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Waiting for the task: (returnval){ [ 776.575823] env[62522]: value = "task-2415355" [ 776.575823] env[62522]: _type = "Task" [ 776.575823] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.585994] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415355, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.663998] env[62522]: INFO nova.compute.manager [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Took 42.73 seconds to build instance. 
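
The wait_for_task / _poll_task pairs above (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, Rename_Task, PowerOnVM_Task) all follow one pattern: the driver invokes a vCenter method that returns a task handle, then polls that task until it "completed successfully", logging a progress percentage on each poll. The snippet below is only an illustrative sketch of that polling loop in plain Python; poll_fn, interval and timeout are hypothetical names for this sketch, not the actual oslo.vmware API used at oslo_vmware/api.py:397/434.

    import time

    def wait_for_task(poll_fn, interval=0.5, timeout=300.0):
        """Poll a task-status callable until it reports completion.

        poll_fn is assumed to return a dict such as
        {'state': 'running', 'progress': 42} or {'state': 'success'};
        the real driver delegates this to the oslo.vmware API session.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            status = poll_fn()
            if status['state'] == 'success':
                return status
            if status['state'] == 'error':
                raise RuntimeError('task failed: %s' % status.get('error'))
            # Lines such as "progress is 88%" in the log come from a loop like this.
            print('progress is %s%%' % status.get('progress', 0))
            time.sleep(interval)
        raise TimeoutError('task did not complete within %.0fs' % timeout)
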
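The ERROR at 776.022310 and the inventory refresh that follows show Placement's optimistic-concurrency handling: every inventory update carries the resource-provider generation, a 409 with code placement.concurrent_update means another writer bumped that generation first, and the report client re-reads the provider before retrying (the retry succeeds further down, where the provider generation moves from 64 to 65). A minimal sketch of that retry loop under assumed interfaces follows: get_provider and put_inventories are hypothetical callables standing in for the real nova.scheduler.client.report / Placement REST calls.

    def set_inventory_with_retry(get_provider, put_inventories, inventory, max_attempts=3):
        """Retry an inventory update when Placement reports a generation conflict.

        get_provider() is assumed to return {'generation': int};
        put_inventories(generation, inventory) is assumed to return
        (status_code, body), mirroring an HTTP PUT against Placement.
        """
        for _ in range(max_attempts):
            generation = get_provider()['generation']
            status, body = put_inventories(generation, inventory)
            if status == 200:
                return body          # accepted; Placement bumps the generation itself
            if status == 409:        # placement.concurrent_update: stale generation
                continue             # re-read the provider and try again
            raise RuntimeError('inventory update failed: %s %s' % (status, body))
        raise RuntimeError('gave up after repeated generation conflicts')
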
[ 776.684697] env[62522]: DEBUG nova.network.neutron [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Updating instance_info_cache with network_info: [{"id": "aa9e5bf2-d99c-44ec-8c21-4aa8866616e0", "address": "fa:16:3e:f5:f1:ce", "network": {"id": "d6a06fb0-929f-44b6-93c4-698be8498194", "bridge": "br-int", "label": "tempest-ImagesTestJSON-272550236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61314d3f0b9e4c368312e714a953e549", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa9e5bf2-d9", "ovs_interfaceid": "aa9e5bf2-d99c-44ec-8c21-4aa8866616e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.734133] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b471b173-7f28-4796-90fa-06a8d60df4c5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.744302] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00087b83-90af-4b41-b785-89c73d6c31bb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.784449] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fc41f03-76b8-4265-840c-588221388b4c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.792198] env[62522]: DEBUG oslo_vmware.api [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415353, 'name': ReconfigVM_Task, 'duration_secs': 0.530052} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.794217] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Reconfigured VM instance instance-00000027 to attach disk [datastore2] d6935c9b-e4cc-47ed-96d5-e485d60382d6/d6935c9b-e4cc-47ed-96d5-e485d60382d6.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 776.794845] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dcf61340-8db6-48a8-b882-56d613caf1cb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.797167] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5bd62c2-ea7e-4c60-83e6-93b696793ab3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.810408] env[62522]: DEBUG nova.compute.provider_tree [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 776.812658] env[62522]: DEBUG oslo_vmware.api [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 776.812658] env[62522]: value = "task-2415356" [ 776.812658] env[62522]: _type = "Task" [ 776.812658] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.821849] env[62522]: DEBUG oslo_vmware.api [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415356, 'name': Rename_Task} progress is 10%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.091381] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415355, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068029} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.091669] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 777.092487] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6bb5f8c-dc2e-4476-80e4-205d1a2a9b73 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.113184] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Reconfiguring VM instance instance-00000024 to attach disk [datastore2] 63a7f41d-13cc-420a-96d3-a3f102869137/63a7f41d-13cc-420a-96d3-a3f102869137.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 777.113594] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c5b520d-44aa-4ae8-9cf5-37444b3bebf9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.135710] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Waiting for the task: (returnval){ [ 777.135710] env[62522]: value = "task-2415357" [ 777.135710] env[62522]: _type = "Task" [ 777.135710] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.143742] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415357, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.166694] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9cdbe9eb-edd6-4f4d-97a4-9235aae4a2a7 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "0d36b844-554e-46e7-9cf9-ef04b67e8898" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.873s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 777.188047] env[62522]: DEBUG oslo_concurrency.lockutils [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Releasing lock "refresh_cache-ee1c638b-1f38-4e21-9369-4d4ff2e13d46" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.189110] env[62522]: DEBUG nova.compute.manager [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Instance network_info: |[{"id": "aa9e5bf2-d99c-44ec-8c21-4aa8866616e0", "address": "fa:16:3e:f5:f1:ce", "network": {"id": "d6a06fb0-929f-44b6-93c4-698be8498194", "bridge": "br-int", "label": "tempest-ImagesTestJSON-272550236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61314d3f0b9e4c368312e714a953e549", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa9e5bf2-d9", "ovs_interfaceid": "aa9e5bf2-d99c-44ec-8c21-4aa8866616e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 777.189547] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:f1:ce', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '399f3826-705c-45f7-9fe0-3a08a945151a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aa9e5bf2-d99c-44ec-8c21-4aa8866616e0', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 777.199822] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Creating folder: Project (61314d3f0b9e4c368312e714a953e549). Parent ref: group-v489562. 
{{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 777.200796] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f3a94fb-274c-45f4-b590-0e22dad15a50 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.211976] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Created folder: Project (61314d3f0b9e4c368312e714a953e549) in parent group-v489562. [ 777.213033] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Creating folder: Instances. Parent ref: group-v489674. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 777.213033] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8cf1ed3f-8e1e-4f8a-9c71-1b524cc30f32 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.223126] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Created folder: Instances in parent group-v489674. [ 777.223601] env[62522]: DEBUG oslo.service.loopingcall [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 777.223601] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 777.223826] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-32ec149e-4775-4eec-896a-88db38a9236e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.244032] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 777.244032] env[62522]: value = "task-2415360" [ 777.244032] env[62522]: _type = "Task" [ 777.244032] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.251893] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415360, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.266191] env[62522]: DEBUG nova.network.neutron [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Successfully updated port: def24237-7aea-42f2-a529-09d7bd81d5ab {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 777.328253] env[62522]: DEBUG oslo_vmware.api [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415356, 'name': Rename_Task, 'duration_secs': 0.351395} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.328481] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 777.328644] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c669dd64-dbd0-4562-aa12-06c0da30d930 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.336036] env[62522]: DEBUG oslo_vmware.api [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 777.336036] env[62522]: value = "task-2415361" [ 777.336036] env[62522]: _type = "Task" [ 777.336036] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.349062] env[62522]: DEBUG oslo_vmware.api [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415361, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.359616] env[62522]: DEBUG nova.scheduler.client.report [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Updated inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with generation 64 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 777.360089] env[62522]: DEBUG nova.compute.provider_tree [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Updating resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 generation from 64 to 65 during operation: update_inventory {{(pid=62522) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 777.360089] env[62522]: DEBUG nova.compute.provider_tree [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 777.388192] env[62522]: 
DEBUG nova.compute.manager [req-616e7145-ee4c-42c6-9871-a105a5fe27e1 req-5eff2cda-875f-426d-b96c-6244d008d501 service nova] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Received event network-changed-aa9e5bf2-d99c-44ec-8c21-4aa8866616e0 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 777.388474] env[62522]: DEBUG nova.compute.manager [req-616e7145-ee4c-42c6-9871-a105a5fe27e1 req-5eff2cda-875f-426d-b96c-6244d008d501 service nova] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Refreshing instance network info cache due to event network-changed-aa9e5bf2-d99c-44ec-8c21-4aa8866616e0. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 777.388817] env[62522]: DEBUG oslo_concurrency.lockutils [req-616e7145-ee4c-42c6-9871-a105a5fe27e1 req-5eff2cda-875f-426d-b96c-6244d008d501 service nova] Acquiring lock "refresh_cache-ee1c638b-1f38-4e21-9369-4d4ff2e13d46" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.389067] env[62522]: DEBUG oslo_concurrency.lockutils [req-616e7145-ee4c-42c6-9871-a105a5fe27e1 req-5eff2cda-875f-426d-b96c-6244d008d501 service nova] Acquired lock "refresh_cache-ee1c638b-1f38-4e21-9369-4d4ff2e13d46" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.389519] env[62522]: DEBUG nova.network.neutron [req-616e7145-ee4c-42c6-9871-a105a5fe27e1 req-5eff2cda-875f-426d-b96c-6244d008d501 service nova] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Refreshing network info cache for port aa9e5bf2-d99c-44ec-8c21-4aa8866616e0 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 777.645748] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415357, 'name': ReconfigVM_Task, 'duration_secs': 0.394567} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.646047] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Reconfigured VM instance instance-00000024 to attach disk [datastore2] 63a7f41d-13cc-420a-96d3-a3f102869137/63a7f41d-13cc-420a-96d3-a3f102869137.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 777.646710] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f65f1aaf-8342-43a6-a3f6-6ffc7ccb7a3c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.654190] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Waiting for the task: (returnval){ [ 777.654190] env[62522]: value = "task-2415362" [ 777.654190] env[62522]: _type = "Task" [ 777.654190] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.663820] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415362, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.669810] env[62522]: DEBUG nova.compute.manager [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 777.754775] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415360, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.770836] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "refresh_cache-713dd924-1c96-496a-bd06-cf0235dd6f75" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.770836] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired lock "refresh_cache-713dd924-1c96-496a-bd06-cf0235dd6f75" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.770836] env[62522]: DEBUG nova.network.neutron [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 777.849068] env[62522]: DEBUG oslo_vmware.api [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415361, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.854040] env[62522]: DEBUG oslo_concurrency.lockutils [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Acquiring lock "bf2ccaeb-610a-437b-be94-d3caefbe15c5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.854374] env[62522]: DEBUG oslo_concurrency.lockutils [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Lock "bf2ccaeb-610a-437b-be94-d3caefbe15c5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 777.854636] env[62522]: DEBUG oslo_concurrency.lockutils [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Acquiring lock "bf2ccaeb-610a-437b-be94-d3caefbe15c5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.854831] env[62522]: DEBUG oslo_concurrency.lockutils [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Lock "bf2ccaeb-610a-437b-be94-d3caefbe15c5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 777.855014] env[62522]: DEBUG oslo_concurrency.lockutils [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Lock "bf2ccaeb-610a-437b-be94-d3caefbe15c5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 777.857102] env[62522]: INFO nova.compute.manager [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Terminating instance [ 777.864869] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.587s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 777.867250] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.701s {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 777.872021] env[62522]: INFO nova.compute.claims [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 777.896822] env[62522]: INFO nova.scheduler.client.report [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Deleted allocations for instance 194c1dd8-3b0a-4c29-9779-65f1534121d1 [ 778.165109] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415362, 'name': Rename_Task, 'duration_secs': 0.335378} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.165719] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 778.166246] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb67c360-2282-452c-b1a5-50659b1275d4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.173032] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Waiting for the task: (returnval){ [ 778.173032] env[62522]: value = "task-2415363" [ 778.173032] env[62522]: _type = "Task" [ 778.173032] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.199744] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415363, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.206695] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 778.252590] env[62522]: DEBUG nova.network.neutron [req-616e7145-ee4c-42c6-9871-a105a5fe27e1 req-5eff2cda-875f-426d-b96c-6244d008d501 service nova] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Updated VIF entry in instance network info cache for port aa9e5bf2-d99c-44ec-8c21-4aa8866616e0. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 778.253825] env[62522]: DEBUG nova.network.neutron [req-616e7145-ee4c-42c6-9871-a105a5fe27e1 req-5eff2cda-875f-426d-b96c-6244d008d501 service nova] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Updating instance_info_cache with network_info: [{"id": "aa9e5bf2-d99c-44ec-8c21-4aa8866616e0", "address": "fa:16:3e:f5:f1:ce", "network": {"id": "d6a06fb0-929f-44b6-93c4-698be8498194", "bridge": "br-int", "label": "tempest-ImagesTestJSON-272550236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61314d3f0b9e4c368312e714a953e549", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa9e5bf2-d9", "ovs_interfaceid": "aa9e5bf2-d99c-44ec-8c21-4aa8866616e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.263147] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415360, 'name': CreateVM_Task, 'duration_secs': 0.596871} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.263489] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 778.264498] env[62522]: DEBUG oslo_concurrency.lockutils [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.264842] env[62522]: DEBUG oslo_concurrency.lockutils [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.265392] env[62522]: DEBUG oslo_concurrency.lockutils [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 778.266613] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b7e2b9b-ebcd-44f5-b28b-1b0779bb46aa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.274731] env[62522]: DEBUG 
oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 778.274731] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d15870-8d2b-c634-280d-f61d587ac0bf" [ 778.274731] env[62522]: _type = "Task" [ 778.274731] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.287143] env[62522]: DEBUG oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d15870-8d2b-c634-280d-f61d587ac0bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.315989] env[62522]: DEBUG nova.network.neutron [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 778.348745] env[62522]: DEBUG oslo_vmware.api [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415361, 'name': PowerOnVM_Task, 'duration_secs': 0.880646} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.349046] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 778.349272] env[62522]: INFO nova.compute.manager [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Took 9.75 seconds to spawn the instance on the hypervisor. [ 778.349455] env[62522]: DEBUG nova.compute.manager [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 778.350346] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c208fd4-a11a-447b-a4d8-dcd675e59d46 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.361524] env[62522]: DEBUG nova.compute.manager [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 778.361811] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 778.362630] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04b8180e-adf1-4774-b073-9f305ee4acdd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.370335] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 778.370672] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5278aeac-906f-4596-9d94-c91a2e005eca {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.378595] env[62522]: DEBUG oslo_vmware.api [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Waiting for the task: (returnval){ [ 778.378595] env[62522]: value = "task-2415364" [ 778.378595] env[62522]: _type = "Task" [ 778.378595] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.389617] env[62522]: DEBUG oslo_vmware.api [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Task: {'id': task-2415364, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.408832] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0fdd5ac7-cf17-496e-9993-3a98fcd6b303 tempest-AttachInterfacesV270Test-413475617 tempest-AttachInterfacesV270Test-413475617-project-member] Lock "194c1dd8-3b0a-4c29-9779-65f1534121d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.146s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.605958] env[62522]: DEBUG nova.network.neutron [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Updating instance_info_cache with network_info: [{"id": "def24237-7aea-42f2-a529-09d7bd81d5ab", "address": "fa:16:3e:1a:19:e5", "network": {"id": "70e81afa-0eda-49c5-b072-e79b3c287468", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1715169983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff5da278d2be4ca983424c8291beadec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdef24237-7a", "ovs_interfaceid": "def24237-7aea-42f2-a529-09d7bd81d5ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.685467] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415363, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.759758] env[62522]: DEBUG oslo_concurrency.lockutils [req-616e7145-ee4c-42c6-9871-a105a5fe27e1 req-5eff2cda-875f-426d-b96c-6244d008d501 service nova] Releasing lock "refresh_cache-ee1c638b-1f38-4e21-9369-4d4ff2e13d46" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 778.759758] env[62522]: DEBUG nova.compute.manager [req-616e7145-ee4c-42c6-9871-a105a5fe27e1 req-5eff2cda-875f-426d-b96c-6244d008d501 service nova] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Received event network-vif-plugged-def24237-7aea-42f2-a529-09d7bd81d5ab {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 778.759758] env[62522]: DEBUG oslo_concurrency.lockutils [req-616e7145-ee4c-42c6-9871-a105a5fe27e1 req-5eff2cda-875f-426d-b96c-6244d008d501 service nova] Acquiring lock "713dd924-1c96-496a-bd06-cf0235dd6f75-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 778.759758] env[62522]: DEBUG oslo_concurrency.lockutils [req-616e7145-ee4c-42c6-9871-a105a5fe27e1 req-5eff2cda-875f-426d-b96c-6244d008d501 service nova] Lock "713dd924-1c96-496a-bd06-cf0235dd6f75-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.759758] env[62522]: DEBUG oslo_concurrency.lockutils [req-616e7145-ee4c-42c6-9871-a105a5fe27e1 req-5eff2cda-875f-426d-b96c-6244d008d501 service nova] Lock "713dd924-1c96-496a-bd06-cf0235dd6f75-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.760238] env[62522]: DEBUG nova.compute.manager [req-616e7145-ee4c-42c6-9871-a105a5fe27e1 req-5eff2cda-875f-426d-b96c-6244d008d501 service nova] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] No waiting events found dispatching network-vif-plugged-def24237-7aea-42f2-a529-09d7bd81d5ab {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 778.760238] env[62522]: WARNING nova.compute.manager [req-616e7145-ee4c-42c6-9871-a105a5fe27e1 req-5eff2cda-875f-426d-b96c-6244d008d501 service nova] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Received unexpected event network-vif-plugged-def24237-7aea-42f2-a529-09d7bd81d5ab for instance with vm_state building and task_state spawning. [ 778.760238] env[62522]: DEBUG nova.compute.manager [req-616e7145-ee4c-42c6-9871-a105a5fe27e1 req-5eff2cda-875f-426d-b96c-6244d008d501 service nova] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Received event network-changed-def24237-7aea-42f2-a529-09d7bd81d5ab {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 778.760238] env[62522]: DEBUG nova.compute.manager [req-616e7145-ee4c-42c6-9871-a105a5fe27e1 req-5eff2cda-875f-426d-b96c-6244d008d501 service nova] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Refreshing instance network info cache due to event network-changed-def24237-7aea-42f2-a529-09d7bd81d5ab. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 778.760238] env[62522]: DEBUG oslo_concurrency.lockutils [req-616e7145-ee4c-42c6-9871-a105a5fe27e1 req-5eff2cda-875f-426d-b96c-6244d008d501 service nova] Acquiring lock "refresh_cache-713dd924-1c96-496a-bd06-cf0235dd6f75" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.785440] env[62522]: DEBUG oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d15870-8d2b-c634-280d-f61d587ac0bf, 'name': SearchDatastore_Task, 'duration_secs': 0.015338} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.785811] env[62522]: DEBUG oslo_concurrency.lockutils [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 778.786226] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 778.786593] env[62522]: DEBUG oslo_concurrency.lockutils [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.786876] env[62522]: DEBUG oslo_concurrency.lockutils [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.787217] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 778.787614] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bbc6eec6-254f-40f9-b6ea-1079258a7def {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.796705] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 778.800020] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 
tempest-ImagesTestJSON-182949557-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 778.800020] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80288c18-6ab7-4ce2-b75f-692ae81cdd12 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.803365] env[62522]: DEBUG oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 778.803365] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521087aa-8265-85dc-8b3e-c4de79f91678" [ 778.803365] env[62522]: _type = "Task" [ 778.803365] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.812282] env[62522]: DEBUG oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521087aa-8265-85dc-8b3e-c4de79f91678, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.869697] env[62522]: INFO nova.compute.manager [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Took 41.77 seconds to build instance. [ 778.900571] env[62522]: DEBUG oslo_vmware.api [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Task: {'id': task-2415364, 'name': PowerOffVM_Task, 'duration_secs': 0.234471} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.900844] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 778.901038] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 778.901335] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee042a9b-ed53-4791-b0b1-998c3749d571 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.987142] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 778.987616] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 778.987837] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Deleting the datastore file [datastore2] bf2ccaeb-610a-437b-be94-d3caefbe15c5 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 778.988263] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7a4ff578-a74f-46d0-96eb-200b7a6b0f0c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.998621] env[62522]: DEBUG oslo_vmware.api [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Waiting for the task: (returnval){ [ 778.998621] env[62522]: value = "task-2415366" [ 778.998621] env[62522]: _type = "Task" [ 778.998621] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.010772] env[62522]: DEBUG oslo_vmware.api [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Task: {'id': task-2415366, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.109956] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Releasing lock "refresh_cache-713dd924-1c96-496a-bd06-cf0235dd6f75" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.110336] env[62522]: DEBUG nova.compute.manager [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Instance network_info: |[{"id": "def24237-7aea-42f2-a529-09d7bd81d5ab", "address": "fa:16:3e:1a:19:e5", "network": {"id": "70e81afa-0eda-49c5-b072-e79b3c287468", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1715169983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff5da278d2be4ca983424c8291beadec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdef24237-7a", "ovs_interfaceid": "def24237-7aea-42f2-a529-09d7bd81d5ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 779.113061] env[62522]: DEBUG oslo_concurrency.lockutils [req-616e7145-ee4c-42c6-9871-a105a5fe27e1 req-5eff2cda-875f-426d-b96c-6244d008d501 service nova] Acquired lock "refresh_cache-713dd924-1c96-496a-bd06-cf0235dd6f75" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.113266] env[62522]: DEBUG nova.network.neutron [req-616e7145-ee4c-42c6-9871-a105a5fe27e1 req-5eff2cda-875f-426d-b96c-6244d008d501 service nova] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Refreshing network info cache for port def24237-7aea-42f2-a529-09d7bd81d5ab {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 779.115058] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:19:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7654928b-7afe-42e3-a18d-68ecc775cefe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'def24237-7aea-42f2-a529-09d7bd81d5ab', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 779.124126] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 
tempest-ServerDiskConfigTestJSON-536235198-project-member] Creating folder: Project (ff5da278d2be4ca983424c8291beadec). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 779.125236] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dbaf640b-087e-49be-bc25-1d1842209e15 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.143669] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Created folder: Project (ff5da278d2be4ca983424c8291beadec) in parent group-v489562. [ 779.143850] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Creating folder: Instances. Parent ref: group-v489677. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 779.148231] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9106254e-53f1-475a-adbd-02179185cd5a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.159356] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Created folder: Instances in parent group-v489677. [ 779.159648] env[62522]: DEBUG oslo.service.loopingcall [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 779.159876] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 779.160140] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b33bf378-4ffc-43f8-99f2-69ccf0da860d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.186435] env[62522]: DEBUG oslo_vmware.api [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415363, 'name': PowerOnVM_Task, 'duration_secs': 0.583045} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.188753] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 779.188753] env[62522]: DEBUG nova.compute.manager [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 779.188753] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 779.188753] env[62522]: value = "task-2415369" [ 779.188753] env[62522]: _type = "Task" [ 779.188753] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.189028] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a266da-c920-4f20-bf9b-2c72bd025b4e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.206295] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415369, 'name': CreateVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.317696] env[62522]: DEBUG oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521087aa-8265-85dc-8b3e-c4de79f91678, 'name': SearchDatastore_Task, 'duration_secs': 0.011602} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.321620] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb248508-3ea9-49a7-be83-7f8b063195c0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.327510] env[62522]: DEBUG oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 779.327510] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5245a866-efe6-1583-546f-806018229fb8" [ 779.327510] env[62522]: _type = "Task" [ 779.327510] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.340227] env[62522]: DEBUG oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5245a866-efe6-1583-546f-806018229fb8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.376344] env[62522]: DEBUG oslo_concurrency.lockutils [None req-089c87b1-23d9-49ec-96b6-6618f282362e tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "d6935c9b-e4cc-47ed-96d5-e485d60382d6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.223s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.496717] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c22efc5-84c2-44a0-9d9f-1aca9da76f6e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.508196] env[62522]: DEBUG oslo_vmware.api [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Task: {'id': task-2415366, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.357534} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.509915] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 779.510025] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 779.510190] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 779.510380] env[62522]: INFO nova.compute.manager [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Took 1.15 seconds to destroy the instance on the hypervisor. [ 779.510630] env[62522]: DEBUG oslo.service.loopingcall [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 779.510887] env[62522]: DEBUG nova.compute.manager [-] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 779.510986] env[62522]: DEBUG nova.network.neutron [-] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 779.513739] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3dcd52d-056d-420e-9d23-784272b9a831 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.550423] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f9905cb-6257-4dea-b009-fad700a1121f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.564816] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e8c483-f742-4b6d-b05a-5dbacbceaf1c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.581602] env[62522]: DEBUG nova.compute.provider_tree [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.705363] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415369, 'name': CreateVM_Task, 'duration_secs': 0.350401} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.705609] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 779.706206] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 779.706360] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.706672] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 779.711802] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6e0397c-91fa-46ed-998f-fcc31e7a8ab1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.719225] env[62522]: DEBUG oslo_concurrency.lockutils [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 779.719225] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 779.719225] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525042f1-b7cb-96f6-86b0-22c20a753af6" [ 779.719225] env[62522]: _type = "Task" [ 779.719225] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.728540] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525042f1-b7cb-96f6-86b0-22c20a753af6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.838606] env[62522]: DEBUG oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5245a866-efe6-1583-546f-806018229fb8, 'name': SearchDatastore_Task, 'duration_secs': 0.01098} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.846591] env[62522]: DEBUG oslo_concurrency.lockutils [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.847511] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] ee1c638b-1f38-4e21-9369-4d4ff2e13d46/ee1c638b-1f38-4e21-9369-4d4ff2e13d46.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 779.847511] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-79b4a989-98d5-4757-ad1f-c2b61411bec7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.856979] env[62522]: DEBUG oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 779.856979] env[62522]: value = "task-2415370" [ 779.856979] env[62522]: _type = "Task" [ 779.856979] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.867409] env[62522]: DEBUG oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415370, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.881092] env[62522]: DEBUG nova.compute.manager [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 780.059651] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Acquiring lock "63a7f41d-13cc-420a-96d3-a3f102869137" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.059925] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Lock "63a7f41d-13cc-420a-96d3-a3f102869137" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.060560] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Acquiring lock "63a7f41d-13cc-420a-96d3-a3f102869137-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.060560] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Lock "63a7f41d-13cc-420a-96d3-a3f102869137-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.060722] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Lock "63a7f41d-13cc-420a-96d3-a3f102869137-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.063225] env[62522]: INFO nova.compute.manager [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Terminating instance [ 780.085058] env[62522]: DEBUG nova.scheduler.client.report [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 780.232120] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 
tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525042f1-b7cb-96f6-86b0-22c20a753af6, 'name': SearchDatastore_Task, 'duration_secs': 0.015986} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.232471] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 780.232708] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 780.232936] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.233098] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.233294] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 780.233562] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-60f0da67-2f1f-4c7b-96b5-583c86733da8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.244223] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 780.244463] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 780.246171] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f8b9dc3-6790-4e00-98a8-7b28f5c2862e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.252679] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 780.252679] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522919ad-e054-b831-5b31-e5eab40ce655" [ 780.252679] env[62522]: _type = "Task" [ 780.252679] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.267729] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522919ad-e054-b831-5b31-e5eab40ce655, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.371296] env[62522]: DEBUG oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415370, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.417356] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.503446] env[62522]: DEBUG nova.network.neutron [req-616e7145-ee4c-42c6-9871-a105a5fe27e1 req-5eff2cda-875f-426d-b96c-6244d008d501 service nova] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Updated VIF entry in instance network info cache for port def24237-7aea-42f2-a529-09d7bd81d5ab. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 780.503821] env[62522]: DEBUG nova.network.neutron [req-616e7145-ee4c-42c6-9871-a105a5fe27e1 req-5eff2cda-875f-426d-b96c-6244d008d501 service nova] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Updating instance_info_cache with network_info: [{"id": "def24237-7aea-42f2-a529-09d7bd81d5ab", "address": "fa:16:3e:1a:19:e5", "network": {"id": "70e81afa-0eda-49c5-b072-e79b3c287468", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1715169983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff5da278d2be4ca983424c8291beadec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdef24237-7a", "ovs_interfaceid": "def24237-7aea-42f2-a529-09d7bd81d5ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.568641] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Acquiring lock "refresh_cache-63a7f41d-13cc-420a-96d3-a3f102869137" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.568641] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Acquired lock "refresh_cache-63a7f41d-13cc-420a-96d3-a3f102869137" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.568641] env[62522]: DEBUG nova.network.neutron [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 780.592027] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.723s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.592027] env[62522]: DEBUG nova.compute.manager [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 780.594062] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.779s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.594439] env[62522]: DEBUG nova.objects.instance [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Lazy-loading 'resources' on Instance uuid 5b69254a-b34b-48ff-a96c-d8573c9abf3b {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 780.764075] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522919ad-e054-b831-5b31-e5eab40ce655, 'name': SearchDatastore_Task, 'duration_secs': 0.03459} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.764977] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de27b3d0-522b-483a-8874-611853d5932d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.770863] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 780.770863] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fd78c5-03cc-15a5-b918-ab232c3a7e2a" [ 780.770863] env[62522]: _type = "Task" [ 780.770863] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.785218] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fd78c5-03cc-15a5-b918-ab232c3a7e2a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.868716] env[62522]: DEBUG oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415370, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.870316} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.868963] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] ee1c638b-1f38-4e21-9369-4d4ff2e13d46/ee1c638b-1f38-4e21-9369-4d4ff2e13d46.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 780.869230] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 780.869492] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1d5c0d78-824a-4574-b9c5-e435bb7dcf90 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.876438] env[62522]: DEBUG oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 780.876438] env[62522]: value = "task-2415371" [ 780.876438] env[62522]: _type = "Task" [ 780.876438] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.885545] env[62522]: DEBUG oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415371, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.010438] env[62522]: DEBUG oslo_concurrency.lockutils [req-616e7145-ee4c-42c6-9871-a105a5fe27e1 req-5eff2cda-875f-426d-b96c-6244d008d501 service nova] Releasing lock "refresh_cache-713dd924-1c96-496a-bd06-cf0235dd6f75" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.101142] env[62522]: DEBUG nova.compute.utils [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 781.104860] env[62522]: DEBUG nova.network.neutron [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 781.107940] env[62522]: DEBUG nova.compute.manager [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 781.108376] env[62522]: DEBUG nova.network.neutron [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 781.235295] env[62522]: DEBUG nova.policy [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a6732e15f00d42d49bd3179abc9a687a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d82e384e79747fbaa2156b15938ec48', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 781.248012] env[62522]: DEBUG nova.network.neutron [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.277157] env[62522]: DEBUG nova.network.neutron [-] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.287039] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fd78c5-03cc-15a5-b918-ab232c3a7e2a, 'name': SearchDatastore_Task, 'duration_secs': 0.015818} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.287406] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.287726] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 713dd924-1c96-496a-bd06-cf0235dd6f75/713dd924-1c96-496a-bd06-cf0235dd6f75.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 781.288038] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d092071b-052b-4133-8f00-de15c871bb22 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.296448] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 781.296448] env[62522]: value = "task-2415372" [ 781.296448] env[62522]: _type = "Task" [ 781.296448] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.308909] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415372, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.394743] env[62522]: DEBUG nova.compute.manager [req-fa9f5323-938e-47bd-9d34-b833eb0135d4 req-d29ce35a-5409-45f4-958c-e7393e8dad8d service nova] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Received event network-vif-deleted-fd0b859a-1918-4692-a81c-b2b0e41951a5 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 781.403298] env[62522]: DEBUG oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415371, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067753} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.403600] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 781.404495] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cba11c6-eced-4d8f-b7ed-6604151aa974 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.435158] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] ee1c638b-1f38-4e21-9369-4d4ff2e13d46/ee1c638b-1f38-4e21-9369-4d4ff2e13d46.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 781.438551] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d65f1a3-71f1-4ca3-892b-2ee66da3fc53 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.460026] env[62522]: DEBUG oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 781.460026] env[62522]: value = "task-2415373" [ 781.460026] env[62522]: _type = "Task" [ 781.460026] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.470058] env[62522]: DEBUG oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415373, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.605348] env[62522]: DEBUG nova.compute.manager [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 781.756385] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Releasing lock "refresh_cache-63a7f41d-13cc-420a-96d3-a3f102869137" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.757078] env[62522]: DEBUG nova.compute.manager [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 781.757343] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 781.758676] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b9cd610-6179-4f17-af49-1887f24712f1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.768183] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 781.771536] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c90c41c-db74-4bdc-b681-52f0a34e61af {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.778437] env[62522]: DEBUG oslo_vmware.api [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Waiting for the task: (returnval){ [ 781.778437] env[62522]: value = "task-2415374" [ 781.778437] env[62522]: _type = "Task" [ 781.778437] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.783238] env[62522]: INFO nova.compute.manager [-] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Took 2.27 seconds to deallocate network for instance. [ 781.799346] env[62522]: DEBUG oslo_vmware.api [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415374, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.810657] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415372, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.834774] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ed1c35-78ae-4512-af0c-ad5797782561 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.846071] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c18e8e9-d4d9-415e-8261-d54293a16f74 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.882232] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c8c4021-28b3-49c3-bd19-8fe0003db522 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.891026] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3f88bf1-5e17-4cf8-b6f0-0da768913c8d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.907117] env[62522]: DEBUG nova.compute.provider_tree [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 781.921744] env[62522]: DEBUG nova.network.neutron [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Successfully created port: 8fd4cfe4-17a7-4555-85c3-d8ffc371fe67 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 781.970172] env[62522]: DEBUG oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415373, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.287804] env[62522]: DEBUG oslo_vmware.api [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415374, 'name': PowerOffVM_Task, 'duration_secs': 0.203673} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.288247] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 782.288501] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 782.288755] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-492301b2-eb93-464b-8909-a56a1926443d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.301196] env[62522]: DEBUG oslo_concurrency.lockutils [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 782.312176] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415372, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.605138} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.313764] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 713dd924-1c96-496a-bd06-cf0235dd6f75/713dd924-1c96-496a-bd06-cf0235dd6f75.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 782.314165] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 782.314584] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 782.314652] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 782.314797] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Deleting the datastore file [datastore2] 63a7f41d-13cc-420a-96d3-a3f102869137 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 782.315042] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2812184e-94b1-4f97-af14-e4f44695b80f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.317854] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88a80ff3-5ab5-49aa-bf2d-90a47f5eac95 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.326426] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 782.326426] env[62522]: value = "task-2415377" [ 782.326426] env[62522]: _type = "Task" [ 782.326426] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.327772] env[62522]: DEBUG oslo_vmware.api [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Waiting for the task: (returnval){ [ 782.327772] env[62522]: value = "task-2415376" [ 782.327772] env[62522]: _type = "Task" [ 782.327772] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.346140] env[62522]: DEBUG oslo_vmware.api [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415376, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.346140] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415377, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.412809] env[62522]: DEBUG nova.scheduler.client.report [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 782.472122] env[62522]: DEBUG oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415373, 'name': ReconfigVM_Task, 'duration_secs': 0.589225} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.472596] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Reconfigured VM instance instance-00000028 to attach disk [datastore2] ee1c638b-1f38-4e21-9369-4d4ff2e13d46/ee1c638b-1f38-4e21-9369-4d4ff2e13d46.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 782.473490] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-25e8d6cd-6c3a-4f51-9077-5734f4ac02a0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.483437] env[62522]: DEBUG oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 782.483437] env[62522]: value = "task-2415378" [ 782.483437] env[62522]: _type = "Task" [ 782.483437] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.494361] env[62522]: DEBUG oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415378, 'name': Rename_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.614592] env[62522]: DEBUG nova.compute.manager [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 782.650146] env[62522]: DEBUG nova.virt.hardware [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 782.650146] env[62522]: DEBUG nova.virt.hardware [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 782.650146] env[62522]: DEBUG nova.virt.hardware [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 782.650372] env[62522]: DEBUG nova.virt.hardware [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 782.650372] env[62522]: DEBUG nova.virt.hardware [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 782.650372] env[62522]: DEBUG nova.virt.hardware [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 782.650708] env[62522]: DEBUG nova.virt.hardware [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 782.651078] env[62522]: DEBUG nova.virt.hardware [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 782.651412] env[62522]: DEBUG nova.virt.hardware [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 782.651749] env[62522]: DEBUG nova.virt.hardware [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 782.654027] env[62522]: DEBUG nova.virt.hardware [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 782.654027] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42eb8829-d9d6-4f65-b705-a8403cb232a4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.665083] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d79a5a-6d19-4d49-9c35-92cc7398b89d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.840372] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415377, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.30833} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.843846] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 782.845322] env[62522]: DEBUG oslo_vmware.api [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Task: {'id': task-2415376, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138965} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.846866] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef77bb67-327c-4490-9645-e5bb498e4a79 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.849455] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 782.850065] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 782.850065] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 782.850379] env[62522]: INFO nova.compute.manager [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Took 1.09 seconds to destroy the instance on the hypervisor. [ 782.850980] env[62522]: DEBUG oslo.service.loopingcall [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 782.851344] env[62522]: DEBUG nova.compute.manager [-] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 782.851854] env[62522]: DEBUG nova.network.neutron [-] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 782.877812] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 713dd924-1c96-496a-bd06-cf0235dd6f75/713dd924-1c96-496a-bd06-cf0235dd6f75.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 782.880829] env[62522]: DEBUG nova.network.neutron [-] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 782.882721] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e6d439f-b720-4b72-987b-e0da9a04faae {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.899613] env[62522]: DEBUG nova.network.neutron [-] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.908698] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 782.908698] env[62522]: value = "task-2415379" [ 782.908698] env[62522]: _type = "Task" [ 782.908698] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.922917] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.327s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.923819] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415379, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.923819] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.673s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.925679] env[62522]: INFO nova.compute.claims [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 782.951827] env[62522]: INFO nova.scheduler.client.report [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Deleted allocations for instance 5b69254a-b34b-48ff-a96c-d8573c9abf3b [ 782.994095] env[62522]: DEBUG oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415378, 'name': Rename_Task, 'duration_secs': 0.155313} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.995211] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 782.995211] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b48291ac-0b8a-406e-bc94-8542030c4141 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.001158] env[62522]: DEBUG oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 783.001158] env[62522]: value = "task-2415380" [ 783.001158] env[62522]: _type = "Task" [ 783.001158] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.010916] env[62522]: DEBUG oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415380, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.404130] env[62522]: INFO nova.compute.manager [-] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Took 0.55 seconds to deallocate network for instance. [ 783.419675] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415379, 'name': ReconfigVM_Task, 'duration_secs': 0.311608} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.419944] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 713dd924-1c96-496a-bd06-cf0235dd6f75/713dd924-1c96-496a-bd06-cf0235dd6f75.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 783.420944] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-db9b7020-a584-4fb2-a0f0-d0e15ec609ac {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.429414] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 783.429414] env[62522]: value = "task-2415381" [ 783.429414] env[62522]: _type = "Task" [ 783.429414] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.444696] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415381, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.463657] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f662ba32-e1d1-44f2-a874-d8d9e9665e7a tempest-TenantUsagesTestJSON-22017395 tempest-TenantUsagesTestJSON-22017395-project-member] Lock "5b69254a-b34b-48ff-a96c-d8573c9abf3b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.768s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.510811] env[62522]: DEBUG oslo_vmware.api [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415380, 'name': PowerOnVM_Task, 'duration_secs': 0.489725} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.511623] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 783.511895] env[62522]: INFO nova.compute.manager [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Took 9.92 seconds to spawn the instance on the hypervisor. 
[ 783.512111] env[62522]: DEBUG nova.compute.manager [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 783.512960] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77be9e41-0d77-49e0-8898-13f919de2f4b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.789595] env[62522]: DEBUG oslo_concurrency.lockutils [None req-454ceffb-9e0c-499e-84ab-3126d74bcfb9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "d30397b4-c617-4717-b624-ad1b06331bea" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.789595] env[62522]: DEBUG oslo_concurrency.lockutils [None req-454ceffb-9e0c-499e-84ab-3126d74bcfb9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "d30397b4-c617-4717-b624-ad1b06331bea" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.789595] env[62522]: DEBUG nova.compute.manager [None req-454ceffb-9e0c-499e-84ab-3126d74bcfb9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 783.789595] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9efd62ed-73f7-4e22-b062-5dd24e62ec82 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.796587] env[62522]: DEBUG nova.compute.manager [None req-454ceffb-9e0c-499e-84ab-3126d74bcfb9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62522) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 783.799910] env[62522]: DEBUG nova.objects.instance [None req-454ceffb-9e0c-499e-84ab-3126d74bcfb9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lazy-loading 'flavor' on Instance uuid d30397b4-c617-4717-b624-ad1b06331bea {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 783.917453] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.921681] env[62522]: DEBUG nova.compute.manager [req-333325e2-0d92-4e6b-b344-8a102da29553 req-af434aca-a56d-4624-8686-ab1e48870b7e service nova] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Received 
event network-vif-plugged-8fd4cfe4-17a7-4555-85c3-d8ffc371fe67 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 783.921681] env[62522]: DEBUG oslo_concurrency.lockutils [req-333325e2-0d92-4e6b-b344-8a102da29553 req-af434aca-a56d-4624-8686-ab1e48870b7e service nova] Acquiring lock "566c207c-5506-4410-98ab-aee9fdbc5d6e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.921681] env[62522]: DEBUG oslo_concurrency.lockutils [req-333325e2-0d92-4e6b-b344-8a102da29553 req-af434aca-a56d-4624-8686-ab1e48870b7e service nova] Lock "566c207c-5506-4410-98ab-aee9fdbc5d6e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.921681] env[62522]: DEBUG oslo_concurrency.lockutils [req-333325e2-0d92-4e6b-b344-8a102da29553 req-af434aca-a56d-4624-8686-ab1e48870b7e service nova] Lock "566c207c-5506-4410-98ab-aee9fdbc5d6e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.921681] env[62522]: DEBUG nova.compute.manager [req-333325e2-0d92-4e6b-b344-8a102da29553 req-af434aca-a56d-4624-8686-ab1e48870b7e service nova] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] No waiting events found dispatching network-vif-plugged-8fd4cfe4-17a7-4555-85c3-d8ffc371fe67 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 783.921918] env[62522]: WARNING nova.compute.manager [req-333325e2-0d92-4e6b-b344-8a102da29553 req-af434aca-a56d-4624-8686-ab1e48870b7e service nova] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Received unexpected event network-vif-plugged-8fd4cfe4-17a7-4555-85c3-d8ffc371fe67 for instance with vm_state building and task_state spawning. [ 783.950842] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415381, 'name': Rename_Task, 'duration_secs': 0.156856} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.950842] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 783.951815] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7f076cf6-529c-460f-9097-131e2726ad8d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.959803] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 783.959803] env[62522]: value = "task-2415382" [ 783.959803] env[62522]: _type = "Task" [ 783.959803] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.971339] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415382, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.036122] env[62522]: INFO nova.compute.manager [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Took 36.06 seconds to build instance. [ 784.175961] env[62522]: DEBUG nova.network.neutron [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Successfully updated port: 8fd4cfe4-17a7-4555-85c3-d8ffc371fe67 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 784.477319] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415382, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.538126] env[62522]: DEBUG oslo_concurrency.lockutils [None req-43299856-5315-4518-a8ca-6b56aa1c633b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "ee1c638b-1f38-4e21-9369-4d4ff2e13d46" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.833s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.651737] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4047810-5c99-4f1b-9c75-0112c8cb5980 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.661171] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e92a4863-8f0b-4f7b-9802-5c503df5fefd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.698503] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Acquiring lock "refresh_cache-566c207c-5506-4410-98ab-aee9fdbc5d6e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 784.698631] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Acquired lock "refresh_cache-566c207c-5506-4410-98ab-aee9fdbc5d6e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.699083] env[62522]: DEBUG nova.network.neutron [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Building network info cache for instance {{(pid=62522) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 784.705472] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a9384b-b217-4126-843b-67cfa7bb47c6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.713632] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31434080-2c82-43a0-9072-7c54838a391f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.732711] env[62522]: DEBUG nova.compute.provider_tree [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 784.816458] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-454ceffb-9e0c-499e-84ab-3126d74bcfb9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 784.817122] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0bc44de1-8905-47a2-99f7-542547375333 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.824886] env[62522]: DEBUG oslo_vmware.api [None req-454ceffb-9e0c-499e-84ab-3126d74bcfb9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 784.824886] env[62522]: value = "task-2415383" [ 784.824886] env[62522]: _type = "Task" [ 784.824886] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.837938] env[62522]: DEBUG oslo_vmware.api [None req-454ceffb-9e0c-499e-84ab-3126d74bcfb9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415383, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.971855] env[62522]: DEBUG oslo_vmware.api [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415382, 'name': PowerOnVM_Task, 'duration_secs': 0.892822} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.972577] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 784.972577] env[62522]: INFO nova.compute.manager [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Took 8.67 seconds to spawn the instance on the hypervisor. [ 784.972577] env[62522]: DEBUG nova.compute.manager [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 784.973793] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a35f999b-16a1-45c7-949d-ff1db893708b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.042478] env[62522]: DEBUG nova.compute.manager [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 785.272942] env[62522]: DEBUG nova.scheduler.client.report [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Updated inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with generation 65 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 785.273270] env[62522]: DEBUG nova.compute.provider_tree [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Updating resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 generation from 65 to 66 during operation: update_inventory {{(pid=62522) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 785.273513] env[62522]: DEBUG nova.compute.provider_tree [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 785.339460] env[62522]: DEBUG oslo_vmware.api [None req-454ceffb-9e0c-499e-84ab-3126d74bcfb9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415383, 'name': PowerOffVM_Task, 'duration_secs': 0.361598} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.340033] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-454ceffb-9e0c-499e-84ab-3126d74bcfb9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 785.341552] env[62522]: DEBUG nova.compute.manager [None req-454ceffb-9e0c-499e-84ab-3126d74bcfb9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 785.342444] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaedca5a-db36-46c7-9f95-607b739058cf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.496675] env[62522]: INFO nova.compute.manager [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Took 35.70 seconds to build instance. [ 785.517339] env[62522]: DEBUG nova.network.neutron [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 785.524188] env[62522]: DEBUG oslo_concurrency.lockutils [None req-420fe96b-91a5-4246-a52d-8330b63f524c tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "ee1c638b-1f38-4e21-9369-4d4ff2e13d46" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.524188] env[62522]: DEBUG oslo_concurrency.lockutils [None req-420fe96b-91a5-4246-a52d-8330b63f524c tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "ee1c638b-1f38-4e21-9369-4d4ff2e13d46" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.524188] env[62522]: DEBUG nova.compute.manager [None req-420fe96b-91a5-4246-a52d-8330b63f524c tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 785.526514] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20540610-4ddf-4862-a3c2-1e24c720bfb7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.540384] env[62522]: DEBUG nova.compute.manager [None req-420fe96b-91a5-4246-a52d-8330b63f524c tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62522) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 785.542251] env[62522]: DEBUG nova.objects.instance [None req-420fe96b-91a5-4246-a52d-8330b63f524c tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lazy-loading 'flavor' on Instance uuid ee1c638b-1f38-4e21-9369-4d4ff2e13d46 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 785.576025] env[62522]: DEBUG oslo_concurrency.lockutils [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.779195] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.855s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.780338] env[62522]: DEBUG nova.compute.manager [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 785.783762] env[62522]: DEBUG oslo_concurrency.lockutils [None req-10df2d46-cc71-48c1-ab3d-d58d54b75b0d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 27.304s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.798289] env[62522]: DEBUG nova.network.neutron [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Updating instance_info_cache with network_info: [{"id": "8fd4cfe4-17a7-4555-85c3-d8ffc371fe67", "address": "fa:16:3e:f5:b6:fa", "network": {"id": "3ce33141-2e44-490b-a38c-1ddb67243436", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-434301613-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d82e384e79747fbaa2156b15938ec48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c6324fd-a761-417c-bc85-b6278daecfc5", "external-id": "nsx-vlan-transportzone-426", "segmentation_id": 426, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fd4cfe4-17", "ovs_interfaceid": "8fd4cfe4-17a7-4555-85c3-d8ffc371fe67", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.860252] env[62522]: DEBUG oslo_concurrency.lockutils [None req-454ceffb-9e0c-499e-84ab-3126d74bcfb9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "d30397b4-c617-4717-b624-ad1b06331bea" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.071s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.989459] env[62522]: DEBUG nova.compute.manager [req-525ab8a4-92ba-432f-b526-ce25d9c06210 req-30b0c3b7-db78-4572-998b-6587a6887a2c service nova] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Received event network-changed-8fd4cfe4-17a7-4555-85c3-d8ffc371fe67 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 785.989768] env[62522]: DEBUG nova.compute.manager [req-525ab8a4-92ba-432f-b526-ce25d9c06210 req-30b0c3b7-db78-4572-998b-6587a6887a2c service nova] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Refreshing instance network info cache due to event network-changed-8fd4cfe4-17a7-4555-85c3-d8ffc371fe67. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 785.989830] env[62522]: DEBUG oslo_concurrency.lockutils [req-525ab8a4-92ba-432f-b526-ce25d9c06210 req-30b0c3b7-db78-4572-998b-6587a6887a2c service nova] Acquiring lock "refresh_cache-566c207c-5506-4410-98ab-aee9fdbc5d6e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.999316] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0cbf63e5-4159-4b47-a5d3-a8e513db1eee tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "713dd924-1c96-496a-bd06-cf0235dd6f75" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.550s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.288562] env[62522]: DEBUG nova.compute.utils [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 786.294952] env[62522]: DEBUG nova.compute.manager [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 786.294952] env[62522]: DEBUG nova.network.neutron [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 786.302251] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Releasing lock "refresh_cache-566c207c-5506-4410-98ab-aee9fdbc5d6e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.302251] env[62522]: DEBUG nova.compute.manager [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Instance network_info: |[{"id": "8fd4cfe4-17a7-4555-85c3-d8ffc371fe67", "address": "fa:16:3e:f5:b6:fa", "network": {"id": "3ce33141-2e44-490b-a38c-1ddb67243436", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-434301613-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d82e384e79747fbaa2156b15938ec48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c6324fd-a761-417c-bc85-b6278daecfc5", "external-id": "nsx-vlan-transportzone-426", "segmentation_id": 426, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fd4cfe4-17", "ovs_interfaceid": "8fd4cfe4-17a7-4555-85c3-d8ffc371fe67", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 786.302407] env[62522]: DEBUG oslo_concurrency.lockutils [req-525ab8a4-92ba-432f-b526-ce25d9c06210 req-30b0c3b7-db78-4572-998b-6587a6887a2c service nova] Acquired lock "refresh_cache-566c207c-5506-4410-98ab-aee9fdbc5d6e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.302529] env[62522]: DEBUG nova.network.neutron [req-525ab8a4-92ba-432f-b526-ce25d9c06210 req-30b0c3b7-db78-4572-998b-6587a6887a2c service nova] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Refreshing network info cache for port 8fd4cfe4-17a7-4555-85c3-d8ffc371fe67 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 786.304250] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:b6:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2c6324fd-a761-417c-bc85-b6278daecfc5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8fd4cfe4-17a7-4555-85c3-d8ffc371fe67', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 786.313578] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Creating folder: Project (2d82e384e79747fbaa2156b15938ec48). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 786.314946] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a6574cf-1469-413c-b1a4-3d17c5f035fe {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.329784] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Created folder: Project (2d82e384e79747fbaa2156b15938ec48) in parent group-v489562. [ 786.330034] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Creating folder: Instances. Parent ref: group-v489683. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 786.330312] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-09936297-4992-46ee-ad52-b6f56776858c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.340529] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Created folder: Instances in parent group-v489683. 
[ 786.340529] env[62522]: DEBUG oslo.service.loopingcall [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 786.340740] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 786.340814] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6831d9e5-98c2-49a0-8767-94622dd9c60b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.369559] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 786.369559] env[62522]: value = "task-2415389" [ 786.369559] env[62522]: _type = "Task" [ 786.369559] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.379615] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415389, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.419994] env[62522]: DEBUG nova.policy [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e959d2cd75d94a38b0d6a7b93f74f819', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '13a5a5169d8345a7a88fef5ff0ecd26e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 786.503770] env[62522]: DEBUG nova.compute.manager [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 786.552995] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-420fe96b-91a5-4246-a52d-8330b63f524c tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 786.553359] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c94341c6-68f7-4218-9c91-2ebf2308ebef {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.564951] env[62522]: DEBUG oslo_vmware.api [None req-420fe96b-91a5-4246-a52d-8330b63f524c tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 786.564951] env[62522]: value = "task-2415390" [ 786.564951] env[62522]: _type = "Task" [ 786.564951] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.574769] env[62522]: DEBUG oslo_vmware.api [None req-420fe96b-91a5-4246-a52d-8330b63f524c tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415390, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.731020] env[62522]: DEBUG nova.objects.instance [None req-68fe23f4-490a-4490-a04f-ae32b73cca10 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lazy-loading 'flavor' on Instance uuid d30397b4-c617-4717-b624-ad1b06331bea {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 786.799609] env[62522]: DEBUG nova.compute.manager [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 786.865755] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-818838be-a426-4bec-ae8c-0c985524fe4d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.884393] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06dfb583-6722-4886-ab58-3af1cee810d1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.890915] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415389, 'name': CreateVM_Task, 'duration_secs': 0.482067} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.891418] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 786.892280] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.892280] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.892575] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 786.892887] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d234ad5-a6ce-42ee-bf7b-4666ea9d16a6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.922623] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c6c6d9-2596-47bc-8d01-71560a7ce403 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.923543] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Waiting for the task: (returnval){ [ 786.923543] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]523cdaaa-80c7-7ce2-66c1-a837fc8fbffe" [ 786.923543] env[62522]: _type = "Task" [ 786.923543] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.930905] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5bc7d82-fe8e-46ff-9ca5-8a2694c06113 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.937859] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]523cdaaa-80c7-7ce2-66c1-a837fc8fbffe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.949537] env[62522]: DEBUG nova.compute.provider_tree [None req-10df2d46-cc71-48c1-ab3d-d58d54b75b0d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 787.033376] env[62522]: DEBUG oslo_concurrency.lockutils [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.075218] env[62522]: DEBUG oslo_vmware.api [None req-420fe96b-91a5-4246-a52d-8330b63f524c tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415390, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.100296] env[62522]: DEBUG nova.network.neutron [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Successfully created port: b2c9a42e-9c2a-40c7-9e58-882c7a354b9b {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 787.233839] env[62522]: DEBUG oslo_concurrency.lockutils [None req-68fe23f4-490a-4490-a04f-ae32b73cca10 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "refresh_cache-d30397b4-c617-4717-b624-ad1b06331bea" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.234050] env[62522]: DEBUG oslo_concurrency.lockutils [None req-68fe23f4-490a-4490-a04f-ae32b73cca10 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquired lock "refresh_cache-d30397b4-c617-4717-b624-ad1b06331bea" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.234265] env[62522]: DEBUG nova.network.neutron [None req-68fe23f4-490a-4490-a04f-ae32b73cca10 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 787.234474] env[62522]: DEBUG nova.objects.instance [None req-68fe23f4-490a-4490-a04f-ae32b73cca10 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lazy-loading 'info_cache' on Instance uuid d30397b4-c617-4717-b624-ad1b06331bea {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 787.350665] env[62522]: DEBUG nova.network.neutron [req-525ab8a4-92ba-432f-b526-ce25d9c06210 req-30b0c3b7-db78-4572-998b-6587a6887a2c service nova] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Updated VIF entry in instance network info cache for port 8fd4cfe4-17a7-4555-85c3-d8ffc371fe67. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 787.351047] env[62522]: DEBUG nova.network.neutron [req-525ab8a4-92ba-432f-b526-ce25d9c06210 req-30b0c3b7-db78-4572-998b-6587a6887a2c service nova] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Updating instance_info_cache with network_info: [{"id": "8fd4cfe4-17a7-4555-85c3-d8ffc371fe67", "address": "fa:16:3e:f5:b6:fa", "network": {"id": "3ce33141-2e44-490b-a38c-1ddb67243436", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-434301613-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d82e384e79747fbaa2156b15938ec48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c6324fd-a761-417c-bc85-b6278daecfc5", "external-id": "nsx-vlan-transportzone-426", "segmentation_id": 426, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fd4cfe4-17", "ovs_interfaceid": "8fd4cfe4-17a7-4555-85c3-d8ffc371fe67", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.442151] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]523cdaaa-80c7-7ce2-66c1-a837fc8fbffe, 'name': SearchDatastore_Task, 'duration_secs': 0.018537} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.442338] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.442581] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 787.442819] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.442966] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.443168] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 787.447020] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b11eea9-e69f-489f-8814-6b093a0e365e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.454295] env[62522]: DEBUG nova.scheduler.client.report [None req-10df2d46-cc71-48c1-ab3d-d58d54b75b0d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 787.458154] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 787.461559] env[62522]: DEBUG nova.virt.vmwareapi.vmops 
[None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 787.461559] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89eb85ff-8340-401f-86e5-da19306f3bf0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.464840] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Waiting for the task: (returnval){ [ 787.464840] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52cfa6c2-124b-cd70-ed5f-e18df9505388" [ 787.464840] env[62522]: _type = "Task" [ 787.464840] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.474065] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52cfa6c2-124b-cd70-ed5f-e18df9505388, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.478301] env[62522]: INFO nova.compute.manager [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Rebuilding instance [ 787.530088] env[62522]: DEBUG nova.compute.manager [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 787.531294] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0edd82b-8e05-4862-950f-ffcb76fc9abb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.576701] env[62522]: DEBUG oslo_vmware.api [None req-420fe96b-91a5-4246-a52d-8330b63f524c tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415390, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.737505] env[62522]: DEBUG nova.objects.base [None req-68fe23f4-490a-4490-a04f-ae32b73cca10 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62522) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 787.809452] env[62522]: DEBUG nova.compute.manager [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 787.832488] env[62522]: DEBUG nova.virt.hardware [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 787.832720] env[62522]: DEBUG nova.virt.hardware [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 787.832873] env[62522]: DEBUG nova.virt.hardware [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 787.833072] env[62522]: DEBUG nova.virt.hardware [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 787.833225] env[62522]: DEBUG nova.virt.hardware [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 787.833372] env[62522]: DEBUG nova.virt.hardware [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 787.833585] env[62522]: DEBUG nova.virt.hardware [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 787.833753] env[62522]: DEBUG nova.virt.hardware [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 787.833918] env[62522]: DEBUG nova.virt.hardware [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 787.834108] env[62522]: DEBUG nova.virt.hardware [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 787.834291] env[62522]: DEBUG nova.virt.hardware [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 787.835534] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c914682-5ec0-4cbd-9a32-5eaa2afc65ac {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.843506] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ad1426-5ff0-46c4-b3dc-364a313f6cfb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.857997] env[62522]: DEBUG oslo_concurrency.lockutils [req-525ab8a4-92ba-432f-b526-ce25d9c06210 req-30b0c3b7-db78-4572-998b-6587a6887a2c service nova] Releasing lock "refresh_cache-566c207c-5506-4410-98ab-aee9fdbc5d6e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.976724] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52cfa6c2-124b-cd70-ed5f-e18df9505388, 'name': SearchDatastore_Task, 'duration_secs': 0.01172} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.978217] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07ee4875-6902-4c84-b49d-af749503a6a4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.984126] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Waiting for the task: (returnval){ [ 787.984126] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5201255b-6d03-c50d-f5a2-57bc687ec1d7" [ 787.984126] env[62522]: _type = "Task" [ 787.984126] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.992252] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5201255b-6d03-c50d-f5a2-57bc687ec1d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.079788] env[62522]: DEBUG oslo_vmware.api [None req-420fe96b-91a5-4246-a52d-8330b63f524c tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415390, 'name': PowerOffVM_Task, 'duration_secs': 1.115564} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.080185] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-420fe96b-91a5-4246-a52d-8330b63f524c tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 788.080425] env[62522]: DEBUG nova.compute.manager [None req-420fe96b-91a5-4246-a52d-8330b63f524c tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 788.081488] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5702ec8-3c65-4be8-a5b3-3d6eafd1ac09 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.460933] env[62522]: DEBUG nova.network.neutron [None req-68fe23f4-490a-4490-a04f-ae32b73cca10 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Updating instance_info_cache with network_info: [{"id": "290fda08-0629-455f-b80b-237754fd93f2", "address": "fa:16:3e:2b:22:54", "network": {"id": "c57ecf55-229f-499c-8cf0-0ae209127cf5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1236590821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff68a180abec48d7bcf3f13e73cfed2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap290fda08-06", "ovs_interfaceid": "290fda08-0629-455f-b80b-237754fd93f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.468070] env[62522]: DEBUG oslo_concurrency.lockutils [None req-10df2d46-cc71-48c1-ab3d-d58d54b75b0d 
tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.684s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.477415] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.551s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.477734] env[62522]: DEBUG nova.objects.instance [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Lazy-loading 'resources' on Instance uuid 3824a70e-8498-410a-904d-c7cd0de0c358 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 788.503423] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5201255b-6d03-c50d-f5a2-57bc687ec1d7, 'name': SearchDatastore_Task, 'duration_secs': 0.012158} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.504017] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.504413] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 566c207c-5506-4410-98ab-aee9fdbc5d6e/566c207c-5506-4410-98ab-aee9fdbc5d6e.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 788.505125] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f66dba1-da3d-49ba-943a-c09f2e955b96 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.514273] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Waiting for the task: (returnval){ [ 788.514273] env[62522]: value = "task-2415391" [ 788.514273] env[62522]: _type = "Task" [ 788.514273] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.531538] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Task: {'id': task-2415391, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.546779] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 788.547231] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17294fd5-2eb8-48de-b2c7-86bf2121129e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.562032] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 788.562032] env[62522]: value = "task-2415392" [ 788.562032] env[62522]: _type = "Task" [ 788.562032] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.574606] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415392, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.598671] env[62522]: DEBUG oslo_concurrency.lockutils [None req-420fe96b-91a5-4246-a52d-8330b63f524c tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "ee1c638b-1f38-4e21-9369-4d4ff2e13d46" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 3.075s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.968915] env[62522]: DEBUG oslo_concurrency.lockutils [None req-68fe23f4-490a-4490-a04f-ae32b73cca10 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Releasing lock "refresh_cache-d30397b4-c617-4717-b624-ad1b06331bea" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.027985] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Task: {'id': task-2415391, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.054116] env[62522]: INFO nova.scheduler.client.report [None req-10df2d46-cc71-48c1-ab3d-d58d54b75b0d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Deleted allocation for migration 232d6454-4d48-4be8-bf2c-81a73db59aa9 [ 789.071555] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415392, 'name': PowerOffVM_Task, 'duration_secs': 0.253698} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.075533] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 789.075809] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 789.076889] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e48660-06eb-4e75-b67f-8a7c2500eb7b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.085601] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 789.085973] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bf324a5d-b153-43e1-987e-85109de776b1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.159677] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 789.159929] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 789.160135] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Deleting the datastore file [datastore1] 713dd924-1c96-496a-bd06-cf0235dd6f75 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 789.160406] env[62522]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f314f28c-a302-4896-8882-3d6eced7747b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.168629] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 789.168629] env[62522]: value = "task-2415395" [ 789.168629] env[62522]: _type = "Task" [ 789.168629] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.177319] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415395, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.269561] env[62522]: DEBUG nova.network.neutron [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Successfully updated port: b2c9a42e-9c2a-40c7-9e58-882c7a354b9b {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 789.289982] env[62522]: DEBUG nova.compute.manager [req-ca2c7f3d-54b4-43f1-bb6d-594d2cbb1eb8 req-e5011c5b-08e3-4e05-8715-837c90b53932 service nova] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Received event network-vif-plugged-b2c9a42e-9c2a-40c7-9e58-882c7a354b9b {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 789.289982] env[62522]: DEBUG oslo_concurrency.lockutils [req-ca2c7f3d-54b4-43f1-bb6d-594d2cbb1eb8 req-e5011c5b-08e3-4e05-8715-837c90b53932 service nova] Acquiring lock "d68b472d-2139-4e2d-bb28-7e45d80904cb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 789.289982] env[62522]: DEBUG oslo_concurrency.lockutils [req-ca2c7f3d-54b4-43f1-bb6d-594d2cbb1eb8 req-e5011c5b-08e3-4e05-8715-837c90b53932 service nova] Lock "d68b472d-2139-4e2d-bb28-7e45d80904cb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 789.289982] env[62522]: DEBUG oslo_concurrency.lockutils [req-ca2c7f3d-54b4-43f1-bb6d-594d2cbb1eb8 req-e5011c5b-08e3-4e05-8715-837c90b53932 service nova] Lock "d68b472d-2139-4e2d-bb28-7e45d80904cb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 789.289982] env[62522]: DEBUG nova.compute.manager [req-ca2c7f3d-54b4-43f1-bb6d-594d2cbb1eb8 req-e5011c5b-08e3-4e05-8715-837c90b53932 service nova] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] No waiting events found dispatching network-vif-plugged-b2c9a42e-9c2a-40c7-9e58-882c7a354b9b {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 789.290355] env[62522]: WARNING nova.compute.manager [req-ca2c7f3d-54b4-43f1-bb6d-594d2cbb1eb8 req-e5011c5b-08e3-4e05-8715-837c90b53932 service nova] 
[instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Received unexpected event network-vif-plugged-b2c9a42e-9c2a-40c7-9e58-882c7a354b9b for instance with vm_state building and task_state spawning. [ 789.525449] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Task: {'id': task-2415391, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.60554} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.528295] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 566c207c-5506-4410-98ab-aee9fdbc5d6e/566c207c-5506-4410-98ab-aee9fdbc5d6e.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 789.528533] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 789.528993] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8fddca48-60bf-4a5a-a547-8e79b369446d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.535345] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Waiting for the task: (returnval){ [ 789.535345] env[62522]: value = "task-2415396" [ 789.535345] env[62522]: _type = "Task" [ 789.535345] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.546801] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Task: {'id': task-2415396, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.553325] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a3cd7bc-94c2-48d4-9181-58e5dbb38d0f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.560164] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32efb5d-b951-4229-9fe9-481cc0cadf69 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.568352] env[62522]: DEBUG oslo_concurrency.lockutils [None req-10df2d46-cc71-48c1-ab3d-d58d54b75b0d tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "879354d3-7423-41e2-93f6-0d8d3a120170" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 34.906s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 789.599251] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c052252e-a4aa-4556-a534-4498a3760255 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.615625] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da85a879-9b19-4735-bf86-dfdf04178a51 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.320505] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquiring lock "refresh_cache-d68b472d-2139-4e2d-bb28-7e45d80904cb" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 790.320792] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquired lock "refresh_cache-d68b472d-2139-4e2d-bb28-7e45d80904cb" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.320866] env[62522]: DEBUG nova.network.neutron [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 790.322075] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-68fe23f4-490a-4490-a04f-ae32b73cca10 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 790.329686] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cb71cd26-ba11-4c25-864e-6926616e214f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.344634] env[62522]: DEBUG nova.compute.provider_tree [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 
tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 790.352330] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Task: {'id': task-2415396, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073843} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.355815] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 790.356256] env[62522]: DEBUG oslo_vmware.api [None req-68fe23f4-490a-4490-a04f-ae32b73cca10 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 790.356256] env[62522]: value = "task-2415397" [ 790.356256] env[62522]: _type = "Task" [ 790.356256] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.356565] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415395, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.329676} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.357113] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fea9581b-24cf-4aa8-8db1-e61e433bfe82 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.361297] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 790.361487] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 790.361663] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 790.388503] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Reconfiguring VM instance instance-0000002a to attach disk [datastore2] 566c207c-5506-4410-98ab-aee9fdbc5d6e/566c207c-5506-4410-98ab-aee9fdbc5d6e.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 790.392797] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a974a219-7f22-480c-b89f-e415e2a28bb7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.407310] env[62522]: DEBUG oslo_vmware.api [None req-68fe23f4-490a-4490-a04f-ae32b73cca10 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415397, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.412845] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Waiting for the task: (returnval){ [ 790.412845] env[62522]: value = "task-2415398" [ 790.412845] env[62522]: _type = "Task" [ 790.412845] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.421662] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Task: {'id': task-2415398, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.829490] env[62522]: DEBUG nova.compute.manager [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 790.830881] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74bcd0ce-894e-4889-a008-92d380983222 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.869656] env[62522]: ERROR nova.scheduler.client.report [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [req-9dee616d-93e3-44a8-9ce9-65c37d370f46] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c7fa38b2-245d-4337-a012-22c1a01c0a72. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9dee616d-93e3-44a8-9ce9-65c37d370f46"}]} [ 790.875750] env[62522]: DEBUG oslo_vmware.api [None req-68fe23f4-490a-4490-a04f-ae32b73cca10 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415397, 'name': PowerOnVM_Task} progress is 82%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.887552] env[62522]: DEBUG nova.network.neutron [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 790.897350] env[62522]: DEBUG nova.scheduler.client.report [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Refreshing inventories for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 790.919949] env[62522]: DEBUG nova.scheduler.client.report [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Updating ProviderTree inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 790.920628] env[62522]: DEBUG nova.compute.provider_tree [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 790.932596] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Task: {'id': task-2415398, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.941134] env[62522]: DEBUG nova.scheduler.client.report [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Refreshing aggregate associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, aggregates: None {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 790.964601] env[62522]: DEBUG nova.scheduler.client.report [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Refreshing trait associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 791.143122] env[62522]: DEBUG nova.network.neutron [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Updating instance_info_cache with network_info: [{"id": "b2c9a42e-9c2a-40c7-9e58-882c7a354b9b", "address": "fa:16:3e:93:51:23", "network": {"id": "be69fd15-aa3c-4e6e-9334-57674f1f2d81", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-543389568-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13a5a5169d8345a7a88fef5ff0ecd26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2c9a42e-9c", "ovs_interfaceid": "b2c9a42e-9c2a-40c7-9e58-882c7a354b9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.343541] env[62522]: INFO nova.compute.manager [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] instance snapshotting [ 791.343816] env[62522]: WARNING nova.compute.manager [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 791.350225] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e83d167-1275-49ff-b1d1-9154df79fed2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.376698] env[62522]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a7d8dd-e9d0-42cd-94c3-817a371e0241 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.392980] env[62522]: DEBUG oslo_vmware.api [None req-68fe23f4-490a-4490-a04f-ae32b73cca10 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415397, 'name': PowerOnVM_Task, 'duration_secs': 0.644389} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.400033] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-68fe23f4-490a-4490-a04f-ae32b73cca10 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 791.400033] env[62522]: DEBUG nova.compute.manager [None req-68fe23f4-490a-4490-a04f-ae32b73cca10 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 791.401771] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac5aba85-2fbc-4e9a-8979-bdc3268806e6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.417179] env[62522]: DEBUG nova.virt.hardware [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 791.417690] env[62522]: DEBUG nova.virt.hardware [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 791.417690] env[62522]: DEBUG nova.virt.hardware [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 791.417690] env[62522]: DEBUG nova.virt.hardware [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Flavor pref 0:0:0 {{(pid=62522) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 791.417801] env[62522]: DEBUG nova.virt.hardware [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 791.418237] env[62522]: DEBUG nova.virt.hardware [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 791.418237] env[62522]: DEBUG nova.virt.hardware [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 791.418355] env[62522]: DEBUG nova.virt.hardware [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 791.418468] env[62522]: DEBUG nova.virt.hardware [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 791.418789] env[62522]: DEBUG nova.virt.hardware [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 791.418789] env[62522]: DEBUG nova.virt.hardware [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 791.423024] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce727605-f634-490d-9d1b-5e05c2ad2ca5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.434420] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb9e0f7-9bb5-415d-a1bb-0740612b7ec6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.441273] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Task: {'id': task-2415398, 'name': ReconfigVM_Task, 'duration_secs': 0.567814} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.442289] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Reconfigured VM instance instance-0000002a to attach disk [datastore2] 566c207c-5506-4410-98ab-aee9fdbc5d6e/566c207c-5506-4410-98ab-aee9fdbc5d6e.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 791.443807] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-843a1881-07e3-4e67-9da5-3e03325f05ab {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.454762] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:19:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7654928b-7afe-42e3-a18d-68ecc775cefe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'def24237-7aea-42f2-a529-09d7bd81d5ab', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 791.462279] env[62522]: DEBUG oslo.service.loopingcall [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 791.465970] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 791.466635] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e52260c-6e37-44f8-8e42-d9a2d7ce8657 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.485390] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Waiting for the task: (returnval){ [ 791.485390] env[62522]: value = "task-2415400" [ 791.485390] env[62522]: _type = "Task" [ 791.485390] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.490338] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 791.490338] env[62522]: value = "task-2415401" [ 791.490338] env[62522]: _type = "Task" [ 791.490338] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.501022] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Task: {'id': task-2415400, 'name': Rename_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.504890] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415401, 'name': CreateVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.548541] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd2b992e-1833-4714-8514-1982bcceae19 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.556691] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b408b02-f6a2-4d23-9870-f6f0183b8302 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.591890] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f1d244-57c5-4b98-a7cc-380313bc5eca {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.595671] env[62522]: DEBUG nova.compute.manager [req-854d816e-7491-401e-a918-d4d172b2a9d1 req-4e9d46fa-64b5-427a-8f17-b54982f38576 service nova] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Received event network-changed-b2c9a42e-9c2a-40c7-9e58-882c7a354b9b {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 791.595815] env[62522]: DEBUG nova.compute.manager [req-854d816e-7491-401e-a918-d4d172b2a9d1 req-4e9d46fa-64b5-427a-8f17-b54982f38576 service nova] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Refreshing instance network info cache due to event network-changed-b2c9a42e-9c2a-40c7-9e58-882c7a354b9b. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 791.596049] env[62522]: DEBUG oslo_concurrency.lockutils [req-854d816e-7491-401e-a918-d4d172b2a9d1 req-4e9d46fa-64b5-427a-8f17-b54982f38576 service nova] Acquiring lock "refresh_cache-d68b472d-2139-4e2d-bb28-7e45d80904cb" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 791.601932] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa0c41e7-4622-4703-973b-44427de0355b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.618064] env[62522]: DEBUG nova.compute.provider_tree [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 791.647423] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Releasing lock "refresh_cache-d68b472d-2139-4e2d-bb28-7e45d80904cb" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 791.647423] env[62522]: DEBUG nova.compute.manager [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Instance network_info: |[{"id": "b2c9a42e-9c2a-40c7-9e58-882c7a354b9b", "address": "fa:16:3e:93:51:23", "network": {"id": "be69fd15-aa3c-4e6e-9334-57674f1f2d81", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-543389568-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13a5a5169d8345a7a88fef5ff0ecd26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2c9a42e-9c", "ovs_interfaceid": "b2c9a42e-9c2a-40c7-9e58-882c7a354b9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 791.647735] env[62522]: DEBUG oslo_concurrency.lockutils [req-854d816e-7491-401e-a918-d4d172b2a9d1 req-4e9d46fa-64b5-427a-8f17-b54982f38576 service nova] 
Acquired lock "refresh_cache-d68b472d-2139-4e2d-bb28-7e45d80904cb" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.647924] env[62522]: DEBUG nova.network.neutron [req-854d816e-7491-401e-a918-d4d172b2a9d1 req-4e9d46fa-64b5-427a-8f17-b54982f38576 service nova] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Refreshing network info cache for port b2c9a42e-9c2a-40c7-9e58-882c7a354b9b {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 791.649200] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:51:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abe48956-848a-4e1f-b1f1-a27baa5390b9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b2c9a42e-9c2a-40c7-9e58-882c7a354b9b', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 791.657339] env[62522]: DEBUG oslo.service.loopingcall [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 791.657814] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 791.658229] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1872f733-1a8f-4297-a40b-1b3cc59fc382 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.678059] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 791.678059] env[62522]: value = "task-2415402" [ 791.678059] env[62522]: _type = "Task" [ 791.678059] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.685901] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415402, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.901983] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Creating Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 791.902712] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-fc2ae897-2385-410e-afc9-f1b396a3ceeb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.910033] env[62522]: DEBUG oslo_vmware.api [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 791.910033] env[62522]: value = "task-2415403" [ 791.910033] env[62522]: _type = "Task" [ 791.910033] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.919141] env[62522]: DEBUG oslo_vmware.api [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415403, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.001104] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Task: {'id': task-2415400, 'name': Rename_Task, 'duration_secs': 0.19339} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.004591] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 792.005337] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415401, 'name': CreateVM_Task, 'duration_secs': 0.386103} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.005527] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3d4938b0-9600-4e26-8927-85604347c129 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.007055] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 792.007701] env[62522]: DEBUG oslo_concurrency.lockutils [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 792.007861] env[62522]: DEBUG oslo_concurrency.lockutils [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.008202] env[62522]: DEBUG oslo_concurrency.lockutils [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 792.008805] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba16c07b-b9cf-401d-bf62-31e10ef857d6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.013374] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Waiting for the task: (returnval){ [ 792.013374] env[62522]: value = "task-2415404" [ 792.013374] env[62522]: _type = "Task" [ 792.013374] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.014626] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 792.014626] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527a065e-7fb7-9e19-4c2f-d42a51dc2302" [ 792.014626] env[62522]: _type = "Task" [ 792.014626] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.025943] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Task: {'id': task-2415404, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.030225] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527a065e-7fb7-9e19-4c2f-d42a51dc2302, 'name': SearchDatastore_Task, 'duration_secs': 0.009591} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.030522] env[62522]: DEBUG oslo_concurrency.lockutils [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 792.031296] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 792.031296] env[62522]: DEBUG oslo_concurrency.lockutils [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 792.031296] env[62522]: DEBUG oslo_concurrency.lockutils [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.031473] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 792.031580] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f5618a6e-53ef-4bed-8f4f-f90f2d629cd8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.039055] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 792.039245] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 792.039958] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9eefb243-1a36-458d-bb09-29176ca058a5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.045159] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 792.045159] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52672c38-fe4b-6a79-1549-611b5d435a0b" [ 792.045159] env[62522]: _type = "Task" [ 792.045159] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.053316] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52672c38-fe4b-6a79-1549-611b5d435a0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.143730] env[62522]: ERROR nova.scheduler.client.report [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [req-30cf6c95-64cd-4a26-94bf-37f1908be990] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c7fa38b2-245d-4337-a012-22c1a01c0a72. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-30cf6c95-64cd-4a26-94bf-37f1908be990"}]} [ 792.162701] env[62522]: DEBUG nova.scheduler.client.report [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Refreshing inventories for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 792.184429] env[62522]: DEBUG nova.scheduler.client.report [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Updating ProviderTree inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 792.184757] env[62522]: DEBUG nova.compute.provider_tree [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 792.195388] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415402, 'name': CreateVM_Task, 'duration_secs': 0.351209} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.195388] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 792.196086] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 792.196295] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.199044] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 792.199044] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f991ffa8-70ed-4965-b98b-abef1273dc2a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.202894] env[62522]: DEBUG oslo_vmware.api [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 792.202894] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524c252b-080f-7df8-b66c-ba0740d527db" [ 792.202894] env[62522]: _type = "Task" [ 792.202894] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.215384] env[62522]: DEBUG nova.scheduler.client.report [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Refreshing aggregate associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, aggregates: None {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 792.223988] env[62522]: DEBUG oslo_vmware.api [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524c252b-080f-7df8-b66c-ba0740d527db, 'name': SearchDatastore_Task, 'duration_secs': 0.010025} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.224309] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 792.225681] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 792.225681] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 792.225681] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.225681] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 792.225681] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e0b524bc-137b-40f9-a02d-7f328c0ec406 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.235418] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 792.235418] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 792.236370] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-efa45237-cd78-4f3c-8521-0e49aace1905 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.243991] env[62522]: DEBUG oslo_vmware.api [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 792.243991] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526e087d-7948-ee77-e3da-a8be3088a519" [ 792.243991] env[62522]: _type = "Task" [ 792.243991] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.253513] env[62522]: DEBUG oslo_vmware.api [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526e087d-7948-ee77-e3da-a8be3088a519, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.253890] env[62522]: DEBUG nova.scheduler.client.report [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Refreshing trait associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 792.279170] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "74e52638-d284-4bd1-8cff-c7aca9426f75" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.279498] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "74e52638-d284-4bd1-8cff-c7aca9426f75" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.420239] env[62522]: DEBUG oslo_vmware.api [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415403, 'name': CreateSnapshot_Task, 'duration_secs': 0.478426} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.422701] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Created Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 792.423809] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c55a10-2e16-413d-bf88-53ff7ded392f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.526121] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Task: {'id': task-2415404, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.535663] env[62522]: DEBUG nova.network.neutron [req-854d816e-7491-401e-a918-d4d172b2a9d1 req-4e9d46fa-64b5-427a-8f17-b54982f38576 service nova] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Updated VIF entry in instance network info cache for port b2c9a42e-9c2a-40c7-9e58-882c7a354b9b. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 792.536451] env[62522]: DEBUG nova.network.neutron [req-854d816e-7491-401e-a918-d4d172b2a9d1 req-4e9d46fa-64b5-427a-8f17-b54982f38576 service nova] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Updating instance_info_cache with network_info: [{"id": "b2c9a42e-9c2a-40c7-9e58-882c7a354b9b", "address": "fa:16:3e:93:51:23", "network": {"id": "be69fd15-aa3c-4e6e-9334-57674f1f2d81", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-543389568-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13a5a5169d8345a7a88fef5ff0ecd26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2c9a42e-9c", "ovs_interfaceid": "b2c9a42e-9c2a-40c7-9e58-882c7a354b9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.564441] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52672c38-fe4b-6a79-1549-611b5d435a0b, 'name': SearchDatastore_Task, 'duration_secs': 0.008739} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.565774] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-feafa54e-0177-4e5e-82f4-ebf8c22694f0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.577106] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 792.577106] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527c258e-8a9e-ab2d-e5d1-238553dd9e95" [ 792.577106] env[62522]: _type = "Task" [ 792.577106] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.595142] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527c258e-8a9e-ab2d-e5d1-238553dd9e95, 'name': SearchDatastore_Task, 'duration_secs': 0.010055} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.595308] env[62522]: DEBUG oslo_concurrency.lockutils [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 792.595567] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 713dd924-1c96-496a-bd06-cf0235dd6f75/713dd924-1c96-496a-bd06-cf0235dd6f75.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 792.595829] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2c9461c6-b182-4861-9fd9-440b0312191d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.603210] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 792.603210] env[62522]: value = "task-2415405" [ 792.603210] env[62522]: _type = "Task" [ 792.603210] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.614372] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415405, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.734559] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Acquiring lock "6ef27aee-719c-4089-825d-fc117e867bde" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.734810] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lock "6ef27aee-719c-4089-825d-fc117e867bde" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.761695] env[62522]: DEBUG oslo_vmware.api [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526e087d-7948-ee77-e3da-a8be3088a519, 'name': SearchDatastore_Task, 'duration_secs': 0.015609} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.761695] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35d70f5a-b32a-4ea2-be8a-14a6e0902c6a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.768903] env[62522]: DEBUG oslo_vmware.api [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 792.768903] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522b0f70-f4d9-73de-5c75-6bccfa38d834" [ 792.768903] env[62522]: _type = "Task" [ 792.768903] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.781614] env[62522]: DEBUG oslo_vmware.api [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522b0f70-f4d9-73de-5c75-6bccfa38d834, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.812751] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52c1f8f2-831e-499c-9009-1f7ca550d8e0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.821729] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aaf0ac4-cd6b-4965-82ff-0379dcd9174e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.862730] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b9f79af-c3dd-4a43-90d1-feb479fba27d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.870649] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc156a6-7431-497e-a877-03704811d6ba {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.890169] env[62522]: DEBUG nova.compute.provider_tree [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 792.946157] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Creating linked-clone VM from snapshot {{(pid=62522) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 792.946551] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-896c8e88-426e-42b7-93b2-388ff585ac7e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.958862] env[62522]: DEBUG oslo_vmware.api [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 792.958862] env[62522]: value = "task-2415407" [ 792.958862] env[62522]: _type = "Task" [ 792.958862] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.969310] env[62522]: DEBUG oslo_vmware.api [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415407, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.024632] env[62522]: DEBUG oslo_vmware.api [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Task: {'id': task-2415404, 'name': PowerOnVM_Task, 'duration_secs': 0.842984} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.024822] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 793.024984] env[62522]: INFO nova.compute.manager [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Took 10.41 seconds to spawn the instance on the hypervisor. [ 793.025403] env[62522]: DEBUG nova.compute.manager [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 793.026255] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe57ebb-78db-4d18-80c2-178642d1d39d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.039369] env[62522]: DEBUG oslo_concurrency.lockutils [req-854d816e-7491-401e-a918-d4d172b2a9d1 req-4e9d46fa-64b5-427a-8f17-b54982f38576 service nova] Releasing lock "refresh_cache-d68b472d-2139-4e2d-bb28-7e45d80904cb" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 793.116105] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415405, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504576} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.116753] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 713dd924-1c96-496a-bd06-cf0235dd6f75/713dd924-1c96-496a-bd06-cf0235dd6f75.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 793.116999] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 793.117289] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2dbcdf66-08cf-4c64-a54e-efa19e2750f8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.124581] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 793.124581] env[62522]: value = "task-2415408" [ 793.124581] env[62522]: _type = "Task" [ 793.124581] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.135317] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415408, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.156169] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Acquiring lock "ff6637e9-2a67-4302-9769-24ec045538d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.156399] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lock "ff6637e9-2a67-4302-9769-24ec045538d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.279590] env[62522]: DEBUG oslo_vmware.api [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522b0f70-f4d9-73de-5c75-6bccfa38d834, 'name': SearchDatastore_Task, 'duration_secs': 0.012642} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.279863] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 793.280269] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] d68b472d-2139-4e2d-bb28-7e45d80904cb/d68b472d-2139-4e2d-bb28-7e45d80904cb.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 793.280497] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-845782ed-3ed9-4d1a-a6c2-f96f0b533990 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.287392] env[62522]: DEBUG oslo_vmware.api [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 793.287392] env[62522]: value = "task-2415409" [ 793.287392] env[62522]: _type = "Task" [ 793.287392] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.297235] env[62522]: DEBUG oslo_vmware.api [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415409, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.413716] env[62522]: ERROR nova.scheduler.client.report [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] [req-c76b3c7e-b053-417c-9a5d-ace19ed90afa] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c7fa38b2-245d-4337-a012-22c1a01c0a72. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c76b3c7e-b053-417c-9a5d-ace19ed90afa"}]} [ 793.430582] env[62522]: DEBUG nova.scheduler.client.report [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Refreshing inventories for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 793.446394] env[62522]: DEBUG nova.scheduler.client.report [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Updating ProviderTree inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 793.446753] env[62522]: DEBUG nova.compute.provider_tree [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 793.460768] env[62522]: DEBUG nova.scheduler.client.report [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Refreshing aggregate associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, aggregates: None {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 793.474034] env[62522]: DEBUG oslo_vmware.api [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415407, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.483903] env[62522]: DEBUG nova.scheduler.client.report [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Refreshing trait associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 793.550330] env[62522]: INFO nova.compute.manager [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Took 39.41 seconds to build instance. [ 793.634252] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415408, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.120655} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.636860] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 793.637976] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84675986-9922-44ae-a4f8-068a601ccd42 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.662204] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 713dd924-1c96-496a-bd06-cf0235dd6f75/713dd924-1c96-496a-bd06-cf0235dd6f75.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 793.665143] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-036ab635-3a52-482a-9c28-32c088963668 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.687695] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 793.687695] env[62522]: value = "task-2415410" [ 793.687695] env[62522]: _type = "Task" [ 793.687695] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.698253] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415410, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.800745] env[62522]: DEBUG oslo_vmware.api [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415409, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.976373] env[62522]: DEBUG oslo_vmware.api [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415407, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.024301] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1588c7-2ffc-4b77-b8d2-aa689c25bb87 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.032784] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c96621-3445-4aaa-8429-f8d47d9617d2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.063378] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d2d5a5f4-a47e-493c-bc37-5a96824c0e3c tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Lock "566c207c-5506-4410-98ab-aee9fdbc5d6e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.609s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.066483] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d60df3e-01a9-4f67-8328-1adef27861a5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.075133] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-081d6317-3167-4af4-b67c-488a8ab47994 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.092686] env[62522]: DEBUG nova.compute.provider_tree [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 794.199244] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415410, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.298599] env[62522]: DEBUG oslo_vmware.api [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415409, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.99124} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.298872] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] d68b472d-2139-4e2d-bb28-7e45d80904cb/d68b472d-2139-4e2d-bb28-7e45d80904cb.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 794.299177] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 794.299461] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4b76667f-e9d0-415a-8e13-26e5af980b14 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.306277] env[62522]: DEBUG oslo_vmware.api [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 794.306277] env[62522]: value = "task-2415411" [ 794.306277] env[62522]: _type = "Task" [ 794.306277] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.318397] env[62522]: DEBUG oslo_vmware.api [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415411, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.473400] env[62522]: DEBUG oslo_vmware.api [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415407, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.566689] env[62522]: DEBUG nova.compute.manager [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 794.629671] env[62522]: DEBUG nova.scheduler.client.report [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Updated inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with generation 70 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 794.629671] env[62522]: DEBUG nova.compute.provider_tree [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Updating resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 generation from 70 to 71 during operation: update_inventory {{(pid=62522) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 794.629671] env[62522]: DEBUG nova.compute.provider_tree [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 794.697945] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415410, 'name': ReconfigVM_Task, 'duration_secs': 0.665732} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.698242] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 713dd924-1c96-496a-bd06-cf0235dd6f75/713dd924-1c96-496a-bd06-cf0235dd6f75.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 794.698851] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-173747ac-4a79-4c9b-a0be-2d30294a3daf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.704475] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 794.704475] env[62522]: value = "task-2415412" [ 794.704475] env[62522]: _type = "Task" [ 794.704475] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.711785] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415412, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.816152] env[62522]: DEBUG oslo_vmware.api [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415411, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068671} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.816460] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 794.817248] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c867704-c54b-4ea6-bd48-41914fb7b569 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.840688] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] d68b472d-2139-4e2d-bb28-7e45d80904cb/d68b472d-2139-4e2d-bb28-7e45d80904cb.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 794.841271] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8726cf2e-5b71-4a2c-b520-bbc33b8ab90a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.868928] env[62522]: DEBUG oslo_vmware.api [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 794.868928] env[62522]: value = "task-2415413" [ 794.868928] env[62522]: _type = "Task" [ 794.868928] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.878303] env[62522]: DEBUG oslo_vmware.api [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415413, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.975239] env[62522]: DEBUG oslo_vmware.api [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415407, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.089700] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.134327] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 6.657s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.137647] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 35.917s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.155681] env[62522]: INFO nova.scheduler.client.report [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Deleted allocations for instance 3824a70e-8498-410a-904d-c7cd0de0c358 [ 795.216191] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415412, 'name': Rename_Task, 'duration_secs': 0.360419} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.216380] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 795.216620] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4c19ab75-fef6-40b7-bd45-b7ee6ec04650 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.223525] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 795.223525] env[62522]: value = "task-2415415" [ 795.223525] env[62522]: _type = "Task" [ 795.223525] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.231517] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415415, 'name': PowerOnVM_Task} progress is 0%. 
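Annotation: the 'Acquiring lock "compute_resources"' / 'acquired ... waited' / '"released" ... held' records in this stretch are emitted by oslo.concurrency's lock wrapper around the resource tracker. A minimal sketch of how such a critical section is typically declared with lockutils (the function bodies here are placeholders, not ResourceTracker code):

    from oslo_concurrency import lockutils


    # Serializes callers on an in-process lock named "compute_resources";
    # with debug logging enabled, lockutils records how long each caller
    # waited for the lock and how long it was held, as seen above.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Placeholder body: resource accounting would happen here.
        pass


    # The same lock can also be taken explicitly as a context manager.
    def instance_claim():
        with lockutils.lock('compute_resources'):
            pass
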
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.383189] env[62522]: DEBUG oslo_vmware.api [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415413, 'name': ReconfigVM_Task, 'duration_secs': 0.314039} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.383520] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Reconfigured VM instance instance-0000002b to attach disk [datastore2] d68b472d-2139-4e2d-bb28-7e45d80904cb/d68b472d-2139-4e2d-bb28-7e45d80904cb.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 795.384321] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e8983968-2159-4028-9d23-2bee51f305b3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.392433] env[62522]: DEBUG oslo_vmware.api [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 795.392433] env[62522]: value = "task-2415416" [ 795.392433] env[62522]: _type = "Task" [ 795.392433] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.405570] env[62522]: DEBUG oslo_vmware.api [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415416, 'name': Rename_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.478537] env[62522]: DEBUG oslo_vmware.api [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415407, 'name': CloneVM_Task, 'duration_secs': 2.244789} completed successfully. 
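Annotation: both spawns in this stretch follow the same tail sequence once the base image copy is in place: extend the root VMDK to the flavor size, reconfigure the VM to attach that disk, rename the VM, and power it on, each step waiting on its own vCenter task. A hedged outline of that order, with hypothetical helper calls rather than the actual vmops code:

    def finish_spawn(session, vm_ref, vmdk_path, root_gb, display_name):
        """Illustrative spawn tail; every helper method here is hypothetical."""
        # ExtendVirtualDisk_Task -- "Extended root virtual disk"
        session.wait_for_task(session.extend_virtual_disk(vmdk_path, root_gb))

        # ReconfigVM_Task -- "Reconfigured VM instance ... to attach disk [datastore] ..."
        session.wait_for_task(session.attach_disk(vm_ref, vmdk_path, disk_type='sparse'))

        # Rename_Task -- give the VM its display name
        session.wait_for_task(session.rename(vm_ref, display_name))

        # PowerOnVM_Task -- "Powering on the VM" / "Powered on the VM"
        session.wait_for_task(session.power_on(vm_ref))
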
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.478884] env[62522]: INFO nova.virt.vmwareapi.vmops [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Created linked-clone VM from snapshot [ 795.479948] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e86d7091-a9f4-4669-b241-27ebc10d3fee {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.493791] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Uploading image 245beb12-f098-4b71-9546-70c4802654f4 {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 795.522522] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 795.522522] env[62522]: value = "vm-489689" [ 795.522522] env[62522]: _type = "VirtualMachine" [ 795.522522] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 795.522848] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-8041685d-58d2-4fb5-be03-d4424be76dd2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.533914] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lease: (returnval){ [ 795.533914] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c613a1-dac1-e01b-109a-bec55f3200fe" [ 795.533914] env[62522]: _type = "HttpNfcLease" [ 795.533914] env[62522]: } obtained for exporting VM: (result){ [ 795.533914] env[62522]: value = "vm-489689" [ 795.533914] env[62522]: _type = "VirtualMachine" [ 795.533914] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 795.534269] env[62522]: DEBUG oslo_vmware.api [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the lease: (returnval){ [ 795.534269] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c613a1-dac1-e01b-109a-bec55f3200fe" [ 795.534269] env[62522]: _type = "HttpNfcLease" [ 795.534269] env[62522]: } to be ready. {{(pid=62522) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 795.546435] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 795.546435] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c613a1-dac1-e01b-109a-bec55f3200fe" [ 795.546435] env[62522]: _type = "HttpNfcLease" [ 795.546435] env[62522]: } is initializing. 
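Annotation: the records before and after this point trace the stream-optimized image export: an HttpNfcLease is requested for the cloned VM (ExportVm), polled while it reports "initializing", and once ready its lease info yields the VMDK URL that is opened for reading while periodic HttpNfcLeaseProgress calls keep the lease alive. A hedged sketch of that sequence, using a hypothetical `vim` client object and method names rather than the real oslo.vmware session API:

    import time


    def export_vm_disk(vim, vm_ref, poll_interval=1.0):
        """Illustrative export flow; `vim` and all of its methods are hypothetical."""
        lease = vim.export_vm(vm_ref)                    # VirtualMachine.ExportVm
        while vim.lease_state(lease) == 'initializing':  # "Lease: ... is initializing."
            time.sleep(poll_interval)
        info = vim.lease_info(lease)                     # read lease info once ready
        vmdk_url = info.device_urls[0].url               # "Found VMDK URL: https://..."
        with vim.open_for_read(vmdk_url) as source:      # "Opening URL: ... for reading."
            for chunk in source:
                vim.lease_progress(lease)                # HttpNfcLeaseProgress keep-alive
                yield chunk
        vim.lease_complete(lease)                        # release the lease when done
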
{{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 795.665498] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cf908d7-71a9-4f90-b5d1-0b4bd566dab2 tempest-ServersAdminNegativeTestJSON-1683195184 tempest-ServersAdminNegativeTestJSON-1683195184-project-member] Lock "3824a70e-8498-410a-904d-c7cd0de0c358" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.798s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.735217] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415415, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.907505] env[62522]: DEBUG oslo_vmware.api [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415416, 'name': Rename_Task, 'duration_secs': 0.190644} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.907781] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 795.908040] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6eede802-070c-487c-9fac-252d6799bba8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.915151] env[62522]: DEBUG oslo_vmware.api [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 795.915151] env[62522]: value = "task-2415418" [ 795.915151] env[62522]: _type = "Task" [ 795.915151] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.923649] env[62522]: DEBUG oslo_vmware.api [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415418, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.045984] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 796.045984] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c613a1-dac1-e01b-109a-bec55f3200fe" [ 796.045984] env[62522]: _type = "HttpNfcLease" [ 796.045984] env[62522]: } is ready. 
{{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 796.046357] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 796.046357] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c613a1-dac1-e01b-109a-bec55f3200fe" [ 796.046357] env[62522]: _type = "HttpNfcLease" [ 796.046357] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 796.047248] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8574487-0d80-42fd-927f-95780a15776f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.059181] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525c7ce4-0b9e-66b5-dda6-8ce62e457ab4/disk-0.vmdk from lease info. {{(pid=62522) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 796.059422] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525c7ce4-0b9e-66b5-dda6-8ce62e457ab4/disk-0.vmdk for reading. {{(pid=62522) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 796.166392] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a4f22df3-24dd-46f7-817e-50da73ddddf7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.197707] env[62522]: WARNING nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance bf2ccaeb-610a-437b-be94-d3caefbe15c5 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 796.197896] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 95e4fe36-6830-4fc4-bb53-1e5643c2f95b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.198036] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.198157] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 19d3d54c-5ba1-420f-b012-a08add8546c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.198282] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance c181ce48-9fe2-4400-9047-f8b5a7159dd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.198398] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance cce5f0d4-364d-4295-a27d-44ca8585f803 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.198518] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance cd69a052-369b-4809-baf0-a1aec44f4ab5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.198829] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance ae3e55b8-00c1-4dae-9276-f46a1e17b80e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.198959] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance d266aff3-42b4-4dcb-b8ca-7c13cdf8d314 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.199083] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance c1fd078c-61d4-4c0f-8c49-0f56a926a087 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.199229] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance e813e7da-fd2c-4f10-b2f3-1e2b5c153a19 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.199354] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance ebca687d-4de7-4fd6-99fb-b4f0154abe9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.199491] env[62522]: WARNING nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 63a7f41d-13cc-420a-96d3-a3f102869137 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 796.199606] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance d30397b4-c617-4717-b624-ad1b06331bea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.199734] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 0d36b844-554e-46e7-9cf9-ef04b67e8898 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.199848] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance d6935c9b-e4cc-47ed-96d5-e485d60382d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.199996] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance ee1c638b-1f38-4e21-9369-4d4ff2e13d46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.200238] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 713dd924-1c96-496a-bd06-cf0235dd6f75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.200439] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 566c207c-5506-4410-98ab-aee9fdbc5d6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.202038] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 879354d3-7423-41e2-93f6-0d8d3a120170 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.202038] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance d68b472d-2139-4e2d-bb28-7e45d80904cb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.237172] env[62522]: DEBUG oslo_vmware.api [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415415, 'name': PowerOnVM_Task, 'duration_secs': 0.646304} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.237475] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 796.237681] env[62522]: DEBUG nova.compute.manager [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 796.238968] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd4fdd63-8002-4f62-a456-ab3d43c8b8ef {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.404583] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Acquiring lock "566c207c-5506-4410-98ab-aee9fdbc5d6e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.404871] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Lock "566c207c-5506-4410-98ab-aee9fdbc5d6e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.405097] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Acquiring lock "566c207c-5506-4410-98ab-aee9fdbc5d6e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.405292] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Lock "566c207c-5506-4410-98ab-aee9fdbc5d6e-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.405463] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Lock "566c207c-5506-4410-98ab-aee9fdbc5d6e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.410680] env[62522]: INFO nova.compute.manager [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Terminating instance [ 796.428546] env[62522]: DEBUG oslo_vmware.api [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415418, 'name': PowerOnVM_Task, 'duration_secs': 0.500016} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.430047] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 796.430047] env[62522]: INFO nova.compute.manager [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Took 8.62 seconds to spawn the instance on the hypervisor. 
[ 796.430047] env[62522]: DEBUG nova.compute.manager [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 796.431126] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdef6673-d9b1-491f-bd88-93dfd48de76d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.470927] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "d6935c9b-e4cc-47ed-96d5-e485d60382d6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.471681] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "d6935c9b-e4cc-47ed-96d5-e485d60382d6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.471681] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "d6935c9b-e4cc-47ed-96d5-e485d60382d6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.471894] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "d6935c9b-e4cc-47ed-96d5-e485d60382d6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.472085] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "d6935c9b-e4cc-47ed-96d5-e485d60382d6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.474244] env[62522]: INFO nova.compute.manager [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Terminating instance [ 796.712439] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance a185273e-cdaf-4967-832b-f75014b7b3f4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 796.758190] env[62522]: DEBUG oslo_concurrency.lockutils [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.804338] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Acquiring lock "7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.804338] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Lock "7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.804480] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Acquiring lock "7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.804670] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Lock "7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.805256] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Lock "7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.809320] env[62522]: INFO nova.compute.manager [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Terminating instance [ 796.923130] env[62522]: DEBUG nova.compute.manager [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 
566c207c-5506-4410-98ab-aee9fdbc5d6e] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 796.923130] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 796.923303] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d78eeade-9dc5-402f-bd4b-d9a4b214c4a5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.936610] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 796.937019] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f93a4eb3-2531-402f-ba3e-aaf2ff3f67c4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.956484] env[62522]: DEBUG oslo_vmware.api [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Waiting for the task: (returnval){ [ 796.956484] env[62522]: value = "task-2415419" [ 796.956484] env[62522]: _type = "Task" [ 796.956484] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.968355] env[62522]: INFO nova.compute.manager [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Took 40.74 seconds to build instance. [ 796.975768] env[62522]: DEBUG oslo_vmware.api [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Task: {'id': task-2415419, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.984320] env[62522]: DEBUG nova.compute.manager [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 796.987835] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 796.987835] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90f92877-6b46-4b85-be76-956f35b820d9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.000198] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 797.001456] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1a26d935-02d2-4a8e-9834-0df96d15196b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.009524] env[62522]: DEBUG oslo_vmware.api [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 797.009524] env[62522]: value = "task-2415420" [ 797.009524] env[62522]: _type = "Task" [ 797.009524] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.024740] env[62522]: DEBUG oslo_vmware.api [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415420, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.216921] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 3c4c395c-0625-4569-990d-e2d4ad162c14 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 797.319113] env[62522]: DEBUG nova.compute.manager [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 797.319113] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 797.319113] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b2002c-647f-442a-9e20-9ae6d56f6b89 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.327274] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 797.329026] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f46728b2-15b1-485a-aa2a-6633ee134fbd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.337791] env[62522]: DEBUG oslo_vmware.api [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for the task: (returnval){ [ 797.337791] env[62522]: value = "task-2415422" [ 797.337791] env[62522]: _type = "Task" [ 797.337791] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.347198] env[62522]: DEBUG oslo_vmware.api [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415422, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.468444] env[62522]: DEBUG oslo_vmware.api [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Task: {'id': task-2415419, 'name': PowerOffVM_Task, 'duration_secs': 0.247791} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.468444] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 797.468444] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 797.468444] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6884fe2e-2884-4edf-94e1-3b2d4c7816ee {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.477164] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8298b9b4-e4b5-4e31-b80a-230c2254bcfd tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "d68b472d-2139-4e2d-bb28-7e45d80904cb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.046s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.525168] env[62522]: DEBUG oslo_vmware.api [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415420, 'name': PowerOffVM_Task, 'duration_secs': 0.218387} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.525669] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 797.525958] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 797.526635] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-057c77ba-0b42-424a-a991-6f809f50809a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.542059] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 797.542803] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 797.543022] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Deleting the datastore file [datastore2] 566c207c-5506-4410-98ab-aee9fdbc5d6e {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 797.543334] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-71b01d25-7522-401e-9efa-9e5e3adb9d15 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.553998] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Acquiring lock "9141ffdd-cbfa-4efe-a01b-dc1326af474c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.554328] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Lock "9141ffdd-cbfa-4efe-a01b-dc1326af474c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.554546] env[62522]: DEBUG oslo_vmware.api [None 
req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Waiting for the task: (returnval){ [ 797.554546] env[62522]: value = "task-2415425" [ 797.554546] env[62522]: _type = "Task" [ 797.554546] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.567030] env[62522]: DEBUG oslo_vmware.api [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Task: {'id': task-2415425, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.613814] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 797.614260] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 797.614378] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Deleting the datastore file [datastore2] d6935c9b-e4cc-47ed-96d5-e485d60382d6 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 797.614768] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d17c8108-0a52-43db-947f-ae1a1dfb8a9a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.623174] env[62522]: DEBUG oslo_vmware.api [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 797.623174] env[62522]: value = "task-2415426" [ 797.623174] env[62522]: _type = "Task" [ 797.623174] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.633284] env[62522]: DEBUG oslo_vmware.api [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415426, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.724163] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 504396d8-077d-4563-91b5-a7a6259eea27 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 797.852985] env[62522]: DEBUG oslo_vmware.api [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415422, 'name': PowerOffVM_Task, 'duration_secs': 0.256344} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.853305] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 797.853507] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 797.853777] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d2d38801-85d0-41a4-a842-c73a5ef43509 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.921190] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 797.921471] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 797.921698] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Deleting the datastore file [datastore2] 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 797.924045] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-843305a2-8c2d-4358-b544-bdeddf13e27a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.929609] env[62522]: DEBUG oslo_vmware.api [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for the task: (returnval){ [ 797.929609] env[62522]: value = "task-2415428" [ 797.929609] env[62522]: _type = "Task" [ 797.929609] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.939492] env[62522]: DEBUG oslo_vmware.api [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415428, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.979992] env[62522]: DEBUG nova.compute.manager [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 798.070744] env[62522]: DEBUG oslo_vmware.api [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Task: {'id': task-2415425, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.270022} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.071031] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 798.071227] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 798.071411] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 798.071587] env[62522]: INFO nova.compute.manager [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Took 1.15 seconds to destroy the instance on the hypervisor. [ 798.071926] env[62522]: DEBUG oslo.service.loopingcall [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
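Annotation: the terminate path logged above for these instances follows a fixed order: power the VM off, unregister it from vCenter, delete its directory from the datastore, then return to the compute manager, which deallocates networking inside a retried looping call. A hedged outline of that order, with hypothetical helper calls standing in for the vmops/ds_util internals:

    def destroy_instance(session, instance):
        """Illustrative teardown order; every helper here is hypothetical."""
        vm_ref = session.find_vm(instance.uuid)

        # 1. PowerOffVM_Task -- "Powering off the VM" / "Powered off the VM"
        session.wait_for_task(session.power_off(vm_ref))

        # 2. UnregisterVM -- "Unregistering the VM" / "Unregistered the VM"
        session.unregister(vm_ref)

        # 3. DeleteDatastoreFile_Task -- "Deleting the datastore file [datastore2] ..."
        #    followed by "Deleted contents of the VM from datastore".
        session.wait_for_task(session.delete_datastore_dir(instance.uuid))

        # 4. Back in the compute manager: "Deallocating network for instance",
        #    retried via an oslo.service looping call until Neutron confirms.
        session.deallocate_network(instance)
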
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 798.072069] env[62522]: DEBUG nova.compute.manager [-] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 798.072172] env[62522]: DEBUG nova.network.neutron [-] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 798.135768] env[62522]: DEBUG oslo_vmware.api [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415426, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.268975} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.136227] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 798.136435] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 798.136602] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 798.136792] env[62522]: INFO nova.compute.manager [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Took 1.15 seconds to destroy the instance on the hypervisor. [ 798.137088] env[62522]: DEBUG oslo.service.loopingcall [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 798.137331] env[62522]: DEBUG nova.compute.manager [-] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 798.137461] env[62522]: DEBUG nova.network.neutron [-] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 798.226507] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 76cb551e-e605-4c80-a6ef-e36681fc0bc2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 798.264499] env[62522]: DEBUG oslo_concurrency.lockutils [None req-88f0ec99-ff18-4e82-8f2e-d2d434ae625d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquiring lock "e813e7da-fd2c-4f10-b2f3-1e2b5c153a19" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.264796] env[62522]: DEBUG oslo_concurrency.lockutils [None req-88f0ec99-ff18-4e82-8f2e-d2d434ae625d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock "e813e7da-fd2c-4f10-b2f3-1e2b5c153a19" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.439716] env[62522]: DEBUG oslo_vmware.api [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415428, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.299389} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.440019] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 798.440233] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 798.440664] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 798.440664] env[62522]: INFO nova.compute.manager [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Took 1.12 seconds to destroy the instance on the hypervisor. [ 798.440877] env[62522]: DEBUG oslo.service.loopingcall [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 798.441110] env[62522]: DEBUG nova.compute.manager [-] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 798.441213] env[62522]: DEBUG nova.network.neutron [-] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 798.509361] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.611517] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "713dd924-1c96-496a-bd06-cf0235dd6f75" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.611847] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "713dd924-1c96-496a-bd06-cf0235dd6f75" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.612109] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "713dd924-1c96-496a-bd06-cf0235dd6f75-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.612310] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "713dd924-1c96-496a-bd06-cf0235dd6f75-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.612488] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "713dd924-1c96-496a-bd06-cf0235dd6f75-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.614811] env[62522]: INFO nova.compute.manager [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Terminating instance [ 798.729884] 
env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 5ed51dce-2a56-4389-acf8-280bd93ff5f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 798.768039] env[62522]: DEBUG nova.compute.utils [None req-88f0ec99-ff18-4e82-8f2e-d2d434ae625d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 798.808050] env[62522]: DEBUG nova.compute.manager [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 798.808652] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f32cecd-6765-4bf3-b37c-ec6f4f055a3f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.120454] env[62522]: DEBUG nova.compute.manager [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 799.120688] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 799.123365] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7e8adc-a01d-4587-9780-74e1c30b8e74 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.136446] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 799.136446] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-53e22e1f-f302-4737-8891-bf31e0bae95f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.144086] env[62522]: DEBUG oslo_vmware.api [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 799.144086] env[62522]: value = "task-2415430" [ 799.144086] env[62522]: _type = "Task" [ 799.144086] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.155632] env[62522]: DEBUG oslo_vmware.api [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415430, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.232957] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 74e663b1-b552-4b71-aa74-308e908d79e7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 799.271663] env[62522]: DEBUG oslo_concurrency.lockutils [None req-88f0ec99-ff18-4e82-8f2e-d2d434ae625d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock "e813e7da-fd2c-4f10-b2f3-1e2b5c153a19" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.321157] env[62522]: INFO nova.compute.manager [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] instance snapshotting [ 799.324791] env[62522]: DEBUG nova.network.neutron [-] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.327840] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f5eeb4-b5bc-48a6-91ae-2cfc1fe98b64 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.335045] env[62522]: DEBUG nova.network.neutron [-] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.362447] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3286f44c-5c51-4311-b366-8db74cf2533d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.608266] env[62522]: DEBUG nova.compute.manager [req-25472028-7d3b-4eba-a6e8-ea8fee4c57e9 req-523f7dd8-b005-4fa9-bc01-3f680c36f7be service nova] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Received event network-vif-deleted-8fd4cfe4-17a7-4555-85c3-d8ffc371fe67 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 799.657345] env[62522]: DEBUG oslo_vmware.api [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415430, 'name': PowerOffVM_Task, 'duration_secs': 0.194257} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.657345] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 799.657535] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 799.657825] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4f778310-2ab3-4c38-896f-447af06fe98c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.690890] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "fe1f5581-0dec-41e5-a450-c3de5a573602" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.691230] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "fe1f5581-0dec-41e5-a450-c3de5a573602" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.727921] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 799.728115] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 799.728327] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Deleting the datastore file [datastore1] 713dd924-1c96-496a-bd06-cf0235dd6f75 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 799.729036] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1b8e09d-cfef-4cbb-a064-de581bf27978 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.736857] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 
7a086314-3e49-48e9-82c9-cead8ecb19d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 799.738141] env[62522]: DEBUG oslo_vmware.api [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 799.738141] env[62522]: value = "task-2415432" [ 799.738141] env[62522]: _type = "Task" [ 799.738141] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.747723] env[62522]: DEBUG oslo_vmware.api [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415432, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.835477] env[62522]: INFO nova.compute.manager [-] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Took 1.76 seconds to deallocate network for instance. [ 799.838181] env[62522]: INFO nova.compute.manager [-] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Took 1.70 seconds to deallocate network for instance. [ 799.879391] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Creating Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 799.890772] env[62522]: DEBUG nova.network.neutron [-] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.890772] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9243bb0c-755e-4211-8134-197e5eed9957 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.901092] env[62522]: DEBUG oslo_vmware.api [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 799.901092] env[62522]: value = "task-2415433" [ 799.901092] env[62522]: _type = "Task" [ 799.901092] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.914063] env[62522]: DEBUG oslo_vmware.api [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415433, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.242186] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance bf44e269-0297-473e-b6ce-04a40d0ec1b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 800.253046] env[62522]: DEBUG oslo_vmware.api [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415432, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18497} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.253762] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 800.253840] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 800.254055] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 800.254246] env[62522]: INFO nova.compute.manager [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Took 1.13 seconds to destroy the instance on the hypervisor. [ 800.254498] env[62522]: DEBUG oslo.service.loopingcall [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 800.254696] env[62522]: DEBUG nova.compute.manager [-] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 800.258039] env[62522]: DEBUG nova.network.neutron [-] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 800.346575] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.347638] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.398596] env[62522]: INFO nova.compute.manager [-] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Took 1.96 seconds to deallocate network for instance. [ 800.399678] env[62522]: DEBUG oslo_concurrency.lockutils [None req-88f0ec99-ff18-4e82-8f2e-d2d434ae625d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquiring lock "e813e7da-fd2c-4f10-b2f3-1e2b5c153a19" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.399979] env[62522]: DEBUG oslo_concurrency.lockutils [None req-88f0ec99-ff18-4e82-8f2e-d2d434ae625d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock "e813e7da-fd2c-4f10-b2f3-1e2b5c153a19" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.400308] env[62522]: INFO nova.compute.manager [None req-88f0ec99-ff18-4e82-8f2e-d2d434ae625d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Attaching volume fad13b91-225f-48a3-bc96-43a13bc45587 to /dev/sdb [ 800.425628] env[62522]: DEBUG oslo_vmware.api [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415433, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.455142] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4a8858e-3f3c-4c9b-9970-01de842254b0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.462564] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d34314-2605-476f-b9db-8909b8be9fcc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.479728] env[62522]: DEBUG nova.virt.block_device [None req-88f0ec99-ff18-4e82-8f2e-d2d434ae625d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Updating existing volume attachment record: 7ed2410c-68df-4f7a-bf95-a4227ffc2685 {{(pid=62522) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 800.748216] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 41a980df-88a9-4f9b-b34b-905b226c0675 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 800.918884] env[62522]: DEBUG oslo_vmware.api [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415433, 'name': CreateSnapshot_Task, 'duration_secs': 1.015095} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.919309] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Created Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 800.920601] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.921975] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d59376ae-b172-4aca-82a0-40b14899b374 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.082577] env[62522]: DEBUG nova.network.neutron [-] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.257150] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance ed7220fa-fee9-4715-acbb-236682c6729e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 801.449373] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Creating linked-clone VM from snapshot {{(pid=62522) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 801.449373] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b8a3ac28-75c8-45df-871c-8f3d18935659 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.457554] env[62522]: DEBUG oslo_vmware.api [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 801.457554] env[62522]: value = "task-2415438" [ 801.457554] env[62522]: _type = "Task" [ 801.457554] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.469095] env[62522]: DEBUG oslo_vmware.api [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415438, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.529540] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Acquiring lock "ae3e55b8-00c1-4dae-9276-f46a1e17b80e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.529540] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Lock "ae3e55b8-00c1-4dae-9276-f46a1e17b80e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.529540] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Acquiring lock "ae3e55b8-00c1-4dae-9276-f46a1e17b80e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.529540] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Lock "ae3e55b8-00c1-4dae-9276-f46a1e17b80e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.529844] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Lock "ae3e55b8-00c1-4dae-9276-f46a1e17b80e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 801.532588] env[62522]: INFO nova.compute.manager [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Terminating instance [ 801.588543] env[62522]: INFO nova.compute.manager [-] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Took 1.33 seconds to deallocate network for instance. [ 801.762356] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance a10c4dee-4490-445a-bea2-9f8ef5425d15 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 801.975538] env[62522]: DEBUG oslo_vmware.api [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415438, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.038312] env[62522]: DEBUG nova.compute.manager [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 802.038547] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 802.039548] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12edda1b-3bca-4904-879f-de5805c7bd4e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.049693] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 802.051090] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f821ef1-78c4-48fc-be10-c60d15864fe0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.059071] env[62522]: DEBUG oslo_vmware.api [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Waiting for the task: (returnval){ [ 802.059071] env[62522]: value = "task-2415439" [ 802.059071] env[62522]: _type = "Task" [ 802.059071] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.066223] env[62522]: DEBUG oslo_vmware.api [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Task: {'id': task-2415439, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.076589] env[62522]: DEBUG nova.compute.manager [req-7e5574c7-f096-4168-b131-010004bd8c70 req-9e5c693a-09b8-411f-ae95-7399d0585601 service nova] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Received event network-vif-deleted-bd57fd68-6d70-40a6-beb5-73e810ccf037 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 802.076796] env[62522]: DEBUG nova.compute.manager [req-7e5574c7-f096-4168-b131-010004bd8c70 req-9e5c693a-09b8-411f-ae95-7399d0585601 service nova] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Received event network-vif-deleted-d2781fca-06c0-403d-8704-705de755c0a0 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 802.076930] env[62522]: DEBUG nova.compute.manager [req-7e5574c7-f096-4168-b131-010004bd8c70 req-9e5c693a-09b8-411f-ae95-7399d0585601 service nova] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Received event network-vif-deleted-def24237-7aea-42f2-a529-09d7bd81d5ab {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 802.098993] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.268020] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 74e52638-d284-4bd1-8cff-c7aca9426f75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 802.475236] env[62522]: DEBUG oslo_vmware.api [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415438, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.567981] env[62522]: DEBUG oslo_vmware.api [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Task: {'id': task-2415439, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.771635] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 6ef27aee-719c-4089-825d-fc117e867bde has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 802.975034] env[62522]: DEBUG oslo_vmware.api [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415438, 'name': CloneVM_Task} progress is 95%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.072745] env[62522]: DEBUG oslo_vmware.api [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Task: {'id': task-2415439, 'name': PowerOffVM_Task, 'duration_secs': 0.924627} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.073147] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 803.073626] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 803.073865] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5fe25910-aa08-4047-a6dc-a41d5d965516 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.137743] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 803.137986] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 803.138357] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Deleting the datastore file [datastore2] ae3e55b8-00c1-4dae-9276-f46a1e17b80e {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 803.138510] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4860dd87-8ad0-44a9-a14b-b0fa88c7b686 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.145681] env[62522]: DEBUG oslo_vmware.api [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Waiting for the task: (returnval){ [ 
803.145681] env[62522]: value = "task-2415442" [ 803.145681] env[62522]: _type = "Task" [ 803.145681] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.154702] env[62522]: DEBUG oslo_vmware.api [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Task: {'id': task-2415442, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.275896] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance ff6637e9-2a67-4302-9769-24ec045538d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.275896] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Total usable vcpus: 48, total allocated vcpus: 19 {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 803.276277] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4288MB phys_disk=200GB used_disk=19GB total_vcpus=48 used_vcpus=19 pci_stats=[] {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 803.475374] env[62522]: DEBUG oslo_vmware.api [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415438, 'name': CloneVM_Task, 'duration_secs': 1.859175} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.476201] env[62522]: INFO nova.virt.vmwareapi.vmops [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Created linked-clone VM from snapshot [ 803.476561] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d926380f-3640-4666-b1e4-bc74f3f07783 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.489027] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Uploading image ecb7aa79-b539-45f4-bb5a-2284da5979f5 {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 803.503558] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Destroying the VM {{(pid=62522) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 803.503858] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-57c9784b-6b6f-47d4-8ccc-5d906c2a0712 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.513118] env[62522]: DEBUG oslo_vmware.api [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 803.513118] env[62522]: value = "task-2415443" [ 803.513118] env[62522]: _type = "Task" [ 803.513118] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.523337] env[62522]: DEBUG oslo_vmware.api [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415443, 'name': Destroy_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.654966] env[62522]: DEBUG oslo_vmware.api [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Task: {'id': task-2415442, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.238997} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.657793] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 803.658041] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 803.658281] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 803.658515] env[62522]: INFO nova.compute.manager [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Took 1.62 seconds to destroy the instance on the hypervisor. [ 803.658783] env[62522]: DEBUG oslo.service.loopingcall [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 803.659176] env[62522]: DEBUG nova.compute.manager [-] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 803.659298] env[62522]: DEBUG nova.network.neutron [-] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 803.778831] env[62522]: DEBUG oslo_concurrency.lockutils [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.779236] env[62522]: DEBUG oslo_concurrency.lockutils [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.797072] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9060ab4e-7c6d-4f7d-b1b6-b7e41f1bb39a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.804764] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d811363d-0506-4bd7-b894-acd24e3a8251 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.835039] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dfe18e8-9ae2-4ebe-9852-ced3a6d99f75 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.843255] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c8410c-6966-436e-a44d-f01438ebb55e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.855666] env[62522]: DEBUG nova.compute.provider_tree [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.024535] env[62522]: DEBUG oslo_vmware.api [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415443, 'name': Destroy_Task} progress is 33%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.290381] env[62522]: DEBUG nova.compute.manager [req-e498f332-3d67-4060-b86b-790c4e6ec4e6 req-caee33a5-f7bb-4ea2-946b-53de6cf5e3f2 service nova] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Received event network-vif-deleted-451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 804.290622] env[62522]: INFO nova.compute.manager [req-e498f332-3d67-4060-b86b-790c4e6ec4e6 req-caee33a5-f7bb-4ea2-946b-53de6cf5e3f2 service nova] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Neutron deleted interface 451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71; detaching it from the instance and deleting it from the info cache [ 804.290803] env[62522]: DEBUG nova.network.neutron [req-e498f332-3d67-4060-b86b-790c4e6ec4e6 req-caee33a5-f7bb-4ea2-946b-53de6cf5e3f2 service nova] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.360780] env[62522]: DEBUG nova.scheduler.client.report [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 804.441101] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525c7ce4-0b9e-66b5-dda6-8ce62e457ab4/disk-0.vmdk. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 804.441101] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5981db3-0e1a-4c22-9b00-552e280ff636 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.447705] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525c7ce4-0b9e-66b5-dda6-8ce62e457ab4/disk-0.vmdk is in state: ready. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 804.448080] env[62522]: ERROR oslo_vmware.rw_handles [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525c7ce4-0b9e-66b5-dda6-8ce62e457ab4/disk-0.vmdk due to incomplete transfer. 
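[editor's note, not part of the log] The entries above and below repeatedly show the oslo.vmware task pattern: a caller invokes an asynchronous vCenter task (Destroy_Task, RemoveSnapshot_Task, ReconfigVM_Task, ...), then the session polls it until completion, producing the "Waiting for the task", "progress is N%" and "completed successfully" lines, with the lease-abort ERROR above being the failure path of an NFC transfer. A minimal illustrative sketch of that calling pattern follows; the vCenter endpoint, credentials and vm_ref are placeholders and not taken from this log.

    # Sketch only: drive a vCenter task through oslo.vmware the way the log's
    # wait_for_task/_poll_task entries suggest. Requires a reachable vCenter.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import exceptions as vexc

    # Placeholder endpoint and credentials (assumption, not from the log).
    session = vmware_api.VMwareAPISession(
        'vc.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    def destroy_vm(session, vm_ref):
        # Start the asynchronous Destroy_Task on the VM's managed object reference.
        task = session.invoke_api(session.vim, 'Destroy_Task', vm_ref)
        try:
            # wait_for_task polls TaskInfo (the "_poll_task ... progress is N%"
            # lines) until the task succeeds, raising if it ends in error.
            return session.wait_for_task(task)
        except vexc.VimFaultException:
            # A real caller would log/clean up here, analogous to the abort path
            # seen in the lease-release ERROR above.
            raise

This is only one plausible shape of the caller; the actual Nova code paths are the vmops/vm_util modules referenced in the log lines themselves.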
[ 804.448448] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-448479c6-cf15-42cd-a51e-14d613a61919 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.458755] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525c7ce4-0b9e-66b5-dda6-8ce62e457ab4/disk-0.vmdk. {{(pid=62522) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 804.459300] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Uploaded image 245beb12-f098-4b71-9546-70c4802654f4 to the Glance image server {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 804.464714] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Destroying the VM {{(pid=62522) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 804.464714] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-cb8a3f1e-1b0c-4930-a007-b22fb8487723 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.470959] env[62522]: DEBUG oslo_vmware.api [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 804.470959] env[62522]: value = "task-2415444" [ 804.470959] env[62522]: _type = "Task" [ 804.470959] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.478267] env[62522]: DEBUG oslo_vmware.api [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415444, 'name': Destroy_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.526131] env[62522]: DEBUG oslo_vmware.api [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415443, 'name': Destroy_Task, 'duration_secs': 0.653583} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.526448] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Destroyed the VM [ 804.526775] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Deleting Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 804.527057] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-8944c66c-d661-4570-9fa9-9e4cf2f5c6f0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.534150] env[62522]: DEBUG oslo_vmware.api [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 804.534150] env[62522]: value = "task-2415445" [ 804.534150] env[62522]: _type = "Task" [ 804.534150] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.542044] env[62522]: DEBUG oslo_vmware.api [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415445, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.728782] env[62522]: DEBUG nova.network.neutron [-] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.795115] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b7f4cbb0-2a57-4912-bbd1-c64f36e9973e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.805249] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d6a45b5-3a3d-451a-9bee-518506619284 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.841553] env[62522]: DEBUG nova.compute.manager [req-e498f332-3d67-4060-b86b-790c4e6ec4e6 req-caee33a5-f7bb-4ea2-946b-53de6cf5e3f2 service nova] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Detach interface failed, port_id=451cd3c2-eeba-4ad9-a5c5-2ff0a4ee7d71, reason: Instance ae3e55b8-00c1-4dae-9276-f46a1e17b80e could not be found. 
{{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 804.866920] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62522) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 804.867258] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.731s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.868104] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.764s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.869385] env[62522]: INFO nova.compute.claims [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 804.978188] env[62522]: DEBUG oslo_vmware.api [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415444, 'name': Destroy_Task, 'duration_secs': 0.46245} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.978562] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Destroyed the VM [ 804.978695] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Deleting Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 804.979697] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-3d1d7211-45d2-4087-8a86-9474ee276f34 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.985505] env[62522]: DEBUG oslo_vmware.api [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 804.985505] env[62522]: value = "task-2415446" [ 804.985505] env[62522]: _type = "Task" [ 804.985505] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.996230] env[62522]: DEBUG oslo_vmware.api [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415446, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.036098] env[62522]: DEBUG nova.objects.instance [None req-d336415f-86f6-4a3f-b76f-51033b8ec6ca tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Lazy-loading 'flavor' on Instance uuid d266aff3-42b4-4dcb-b8ca-7c13cdf8d314 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 805.045795] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-88f0ec99-ff18-4e82-8f2e-d2d434ae625d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Volume attach. Driver type: vmdk {{(pid=62522) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 805.045984] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-88f0ec99-ff18-4e82-8f2e-d2d434ae625d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489692', 'volume_id': 'fad13b91-225f-48a3-bc96-43a13bc45587', 'name': 'volume-fad13b91-225f-48a3-bc96-43a13bc45587', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e813e7da-fd2c-4f10-b2f3-1e2b5c153a19', 'attached_at': '', 'detached_at': '', 'volume_id': 'fad13b91-225f-48a3-bc96-43a13bc45587', 'serial': 'fad13b91-225f-48a3-bc96-43a13bc45587'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 805.047584] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-346f9ac3-d04c-44dd-a2ce-cf28ee0edd49 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.053379] env[62522]: DEBUG oslo_vmware.api [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415445, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.066845] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a88a5aef-a5d0-42b1-b3f0-7f3d85fb9d8a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.097051] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-88f0ec99-ff18-4e82-8f2e-d2d434ae625d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] volume-fad13b91-225f-48a3-bc96-43a13bc45587/volume-fad13b91-225f-48a3-bc96-43a13bc45587.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 805.097940] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9299c9c3-25c1-4158-a623-192bf1eefe50 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.117272] env[62522]: DEBUG oslo_vmware.api [None req-88f0ec99-ff18-4e82-8f2e-d2d434ae625d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for the task: (returnval){ [ 805.117272] env[62522]: value = "task-2415447" [ 805.117272] env[62522]: _type = "Task" [ 805.117272] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.125799] env[62522]: DEBUG oslo_vmware.api [None req-88f0ec99-ff18-4e82-8f2e-d2d434ae625d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415447, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.233015] env[62522]: INFO nova.compute.manager [-] [instance: ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Took 1.57 seconds to deallocate network for instance. [ 805.497071] env[62522]: DEBUG oslo_vmware.api [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415446, 'name': RemoveSnapshot_Task, 'duration_secs': 0.41889} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.497071] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Deleted Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 805.497266] env[62522]: INFO nova.compute.manager [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Took 14.15 seconds to snapshot the instance on the hypervisor. [ 805.547345] env[62522]: DEBUG oslo_vmware.api [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415445, 'name': RemoveSnapshot_Task, 'duration_secs': 0.99361} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.547907] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d336415f-86f6-4a3f-b76f-51033b8ec6ca tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Acquiring lock "refresh_cache-d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.548115] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d336415f-86f6-4a3f-b76f-51033b8ec6ca tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Acquired lock "refresh_cache-d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.550015] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Deleted Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 805.597377] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Acquiring lock "35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.597621] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Lock "35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.627971] env[62522]: DEBUG oslo_vmware.api [None req-88f0ec99-ff18-4e82-8f2e-d2d434ae625d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415447, 'name': ReconfigVM_Task, 'duration_secs': 0.481903} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.628379] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-88f0ec99-ff18-4e82-8f2e-d2d434ae625d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Reconfigured VM instance instance-00000022 to attach disk [datastore1] volume-fad13b91-225f-48a3-bc96-43a13bc45587/volume-fad13b91-225f-48a3-bc96-43a13bc45587.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 805.633799] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-440c0ffe-7eb1-4dbe-a958-b8d2e26e7b3b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.648733] env[62522]: DEBUG oslo_vmware.api [None req-88f0ec99-ff18-4e82-8f2e-d2d434ae625d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for the task: (returnval){ [ 805.648733] env[62522]: value = "task-2415448" [ 805.648733] env[62522]: _type = "Task" [ 805.648733] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.657354] env[62522]: DEBUG oslo_vmware.api [None req-88f0ec99-ff18-4e82-8f2e-d2d434ae625d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415448, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.740642] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.926716] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "ee1c638b-1f38-4e21-9369-4d4ff2e13d46" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.926977] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "ee1c638b-1f38-4e21-9369-4d4ff2e13d46" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.927206] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "ee1c638b-1f38-4e21-9369-4d4ff2e13d46-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.927387] env[62522]: DEBUG oslo_concurrency.lockutils [None 
req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "ee1c638b-1f38-4e21-9369-4d4ff2e13d46-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.927556] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "ee1c638b-1f38-4e21-9369-4d4ff2e13d46-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.931776] env[62522]: INFO nova.compute.manager [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Terminating instance [ 806.002228] env[62522]: DEBUG nova.compute.manager [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Instance disappeared during snapshot {{(pid=62522) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 806.014675] env[62522]: DEBUG nova.compute.manager [None req-a0388239-f481-4d49-8f30-55ae95e0a1ea tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Image not found during clean up 245beb12-f098-4b71-9546-70c4802654f4 {{(pid=62522) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 806.057175] env[62522]: WARNING nova.compute.manager [None req-281ee2f6-b142-4a42-9b63-0b819be43ec0 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Image not found during snapshot: nova.exception.ImageNotFound: Image ecb7aa79-b539-45f4-bb5a-2284da5979f5 could not be found. [ 806.134711] env[62522]: DEBUG nova.network.neutron [None req-d336415f-86f6-4a3f-b76f-51033b8ec6ca tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 806.161227] env[62522]: DEBUG oslo_vmware.api [None req-88f0ec99-ff18-4e82-8f2e-d2d434ae625d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415448, 'name': ReconfigVM_Task, 'duration_secs': 0.243404} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.161594] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-88f0ec99-ff18-4e82-8f2e-d2d434ae625d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489692', 'volume_id': 'fad13b91-225f-48a3-bc96-43a13bc45587', 'name': 'volume-fad13b91-225f-48a3-bc96-43a13bc45587', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e813e7da-fd2c-4f10-b2f3-1e2b5c153a19', 'attached_at': '', 'detached_at': '', 'volume_id': 'fad13b91-225f-48a3-bc96-43a13bc45587', 'serial': 'fad13b91-225f-48a3-bc96-43a13bc45587'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 806.407813] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba997afc-360c-4c74-ad0b-c323b6e7d971 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.415903] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e81705-bfad-448b-baba-8b130662665b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.449514] env[62522]: DEBUG nova.compute.manager [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 806.449743] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 806.451081] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e3716d-0e4e-4867-9247-6b098fcf49ee {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.454627] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71c99a34-b4a4-4e0b-91aa-3abf6f4004cc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.464512] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c970b081-98bb-41ea-a554-a7d3633b7b96 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.468143] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 806.468368] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ec7a7437-f040-4c16-b6ec-2cfcc12d02f5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.478841] env[62522]: DEBUG nova.compute.provider_tree [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 806.523669] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 806.523916] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 806.524115] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Deleting the datastore file [datastore2] ee1c638b-1f38-4e21-9369-4d4ff2e13d46 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 806.524378] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-987627d2-b716-4b9a-945f-5a7d756bd892 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.531029] 
env[62522]: DEBUG oslo_vmware.api [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 806.531029] env[62522]: value = "task-2415450" [ 806.531029] env[62522]: _type = "Task" [ 806.531029] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.534343] env[62522]: DEBUG oslo_concurrency.lockutils [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquiring lock "d68b472d-2139-4e2d-bb28-7e45d80904cb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.534592] env[62522]: DEBUG oslo_concurrency.lockutils [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "d68b472d-2139-4e2d-bb28-7e45d80904cb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.535305] env[62522]: DEBUG oslo_concurrency.lockutils [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquiring lock "d68b472d-2139-4e2d-bb28-7e45d80904cb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.535532] env[62522]: DEBUG oslo_concurrency.lockutils [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "d68b472d-2139-4e2d-bb28-7e45d80904cb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.535733] env[62522]: DEBUG oslo_concurrency.lockutils [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "d68b472d-2139-4e2d-bb28-7e45d80904cb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.537897] env[62522]: INFO nova.compute.manager [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Terminating instance [ 806.543149] env[62522]: DEBUG oslo_vmware.api [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415450, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.587578] env[62522]: DEBUG nova.compute.manager [req-2f90acad-b1f1-46f9-b733-d3e73405c14b req-1eb6d5e5-5c14-4599-a621-224acca6696c service nova] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Received event network-changed-e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 806.588284] env[62522]: DEBUG nova.compute.manager [req-2f90acad-b1f1-46f9-b733-d3e73405c14b req-1eb6d5e5-5c14-4599-a621-224acca6696c service nova] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Refreshing instance network info cache due to event network-changed-e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 806.588284] env[62522]: DEBUG oslo_concurrency.lockutils [req-2f90acad-b1f1-46f9-b733-d3e73405c14b req-1eb6d5e5-5c14-4599-a621-224acca6696c service nova] Acquiring lock "refresh_cache-d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 806.662878] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "845f99b8-4a9d-4fbe-89e1-825a5ddd01f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.663183] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "845f99b8-4a9d-4fbe-89e1-825a5ddd01f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.982101] env[62522]: DEBUG nova.scheduler.client.report [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 807.001916] env[62522]: DEBUG nova.network.neutron [None req-d336415f-86f6-4a3f-b76f-51033b8ec6ca tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Updating instance_info_cache with network_info: [{"id": "e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55", "address": "fa:16:3e:94:f4:ed", "network": {"id": "eb0d2ded-859d-46b3-843d-bb580d0bfb6b", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-831675108-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": 
"fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dba307f1fbf48bfac98d9836a72254e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0fac98f-bf", "ovs_interfaceid": "e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.040441] env[62522]: DEBUG oslo_vmware.api [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415450, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13736} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.041078] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 807.041160] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 807.041277] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 807.041449] env[62522]: INFO nova.compute.manager [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Took 0.59 seconds to destroy the instance on the hypervisor. [ 807.041725] env[62522]: DEBUG oslo.service.loopingcall [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 807.041917] env[62522]: DEBUG nova.compute.manager [-] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 807.042025] env[62522]: DEBUG nova.network.neutron [-] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 807.046053] env[62522]: DEBUG nova.compute.manager [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 807.046053] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 807.046277] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c7b8eb-2acb-4439-911b-6bcc6a07d9bc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.053307] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 807.053549] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-53753144-4aa7-41b1-a7c3-70d5577bb5c7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.059902] env[62522]: DEBUG oslo_vmware.api [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 807.059902] env[62522]: value = "task-2415451" [ 807.059902] env[62522]: _type = "Task" [ 807.059902] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.067194] env[62522]: DEBUG oslo_vmware.api [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415451, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.203653] env[62522]: DEBUG nova.objects.instance [None req-88f0ec99-ff18-4e82-8f2e-d2d434ae625d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lazy-loading 'flavor' on Instance uuid e813e7da-fd2c-4f10-b2f3-1e2b5c153a19 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 807.443033] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "interface-19d3d54c-5ba1-420f-b012-a08add8546c9-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.443033] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "interface-19d3d54c-5ba1-420f-b012-a08add8546c9-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.443471] env[62522]: DEBUG nova.objects.instance [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lazy-loading 'flavor' on Instance uuid 19d3d54c-5ba1-420f-b012-a08add8546c9 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 807.487152] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.619s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.487719] env[62522]: DEBUG nova.compute.manager [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 807.490269] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.760s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.491737] env[62522]: INFO nova.compute.claims [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 807.504893] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d336415f-86f6-4a3f-b76f-51033b8ec6ca tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Releasing lock "refresh_cache-d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 807.505274] env[62522]: DEBUG nova.compute.manager [None req-d336415f-86f6-4a3f-b76f-51033b8ec6ca tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Inject network info {{(pid=62522) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 807.505488] env[62522]: DEBUG nova.compute.manager [None req-d336415f-86f6-4a3f-b76f-51033b8ec6ca tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] network_info to inject: |[{"id": "e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55", "address": "fa:16:3e:94:f4:ed", "network": {"id": "eb0d2ded-859d-46b3-843d-bb580d0bfb6b", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-831675108-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dba307f1fbf48bfac98d9836a72254e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0fac98f-bf", "ovs_interfaceid": "e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 807.510215] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d336415f-86f6-4a3f-b76f-51033b8ec6ca tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: 
d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Reconfiguring VM instance to set the machine id {{(pid=62522) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 807.510530] env[62522]: DEBUG oslo_concurrency.lockutils [req-2f90acad-b1f1-46f9-b733-d3e73405c14b req-1eb6d5e5-5c14-4599-a621-224acca6696c service nova] Acquired lock "refresh_cache-d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.510716] env[62522]: DEBUG nova.network.neutron [req-2f90acad-b1f1-46f9-b733-d3e73405c14b req-1eb6d5e5-5c14-4599-a621-224acca6696c service nova] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Refreshing network info cache for port e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 807.511704] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51c5fe2c-5a5c-4e2d-8e72-b3199ab8b14d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.531074] env[62522]: DEBUG oslo_vmware.api [None req-d336415f-86f6-4a3f-b76f-51033b8ec6ca tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Waiting for the task: (returnval){ [ 807.531074] env[62522]: value = "task-2415452" [ 807.531074] env[62522]: _type = "Task" [ 807.531074] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.540781] env[62522]: DEBUG oslo_vmware.api [None req-d336415f-86f6-4a3f-b76f-51033b8ec6ca tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': task-2415452, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.570245] env[62522]: DEBUG oslo_vmware.api [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415451, 'name': PowerOffVM_Task, 'duration_secs': 0.186968} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.570549] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 807.570737] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 807.571014] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cd265742-2c2f-4ac5-95e9-92ec3ad700d9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.633779] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 807.634029] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 807.634224] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Deleting the datastore file [datastore2] d68b472d-2139-4e2d-bb28-7e45d80904cb {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 807.634490] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2612e172-3e1a-4e70-a49d-bf0b117d1f1b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.641536] env[62522]: DEBUG oslo_vmware.api [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 807.641536] env[62522]: value = "task-2415454" [ 807.641536] env[62522]: _type = "Task" [ 807.641536] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.649527] env[62522]: DEBUG oslo_vmware.api [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415454, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.708554] env[62522]: DEBUG oslo_concurrency.lockutils [None req-88f0ec99-ff18-4e82-8f2e-d2d434ae625d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock "e813e7da-fd2c-4f10-b2f3-1e2b5c153a19" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.308s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.794887] env[62522]: DEBUG nova.network.neutron [-] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.805198] env[62522]: DEBUG nova.network.neutron [req-2f90acad-b1f1-46f9-b733-d3e73405c14b req-1eb6d5e5-5c14-4599-a621-224acca6696c service nova] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Updated VIF entry in instance network info cache for port e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 807.805578] env[62522]: DEBUG nova.network.neutron [req-2f90acad-b1f1-46f9-b733-d3e73405c14b req-1eb6d5e5-5c14-4599-a621-224acca6696c service nova] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Updating instance_info_cache with network_info: [{"id": "e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55", "address": "fa:16:3e:94:f4:ed", "network": {"id": "eb0d2ded-859d-46b3-843d-bb580d0bfb6b", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-831675108-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dba307f1fbf48bfac98d9836a72254e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0fac98f-bf", "ovs_interfaceid": "e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.000159] env[62522]: DEBUG nova.compute.utils [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 808.001664] env[62522]: DEBUG nova.compute.manager [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Allocating IP information in the background. 
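[editorial note] The instance_info_cache blobs above are serialized network_info entries: a list of VIFs, each carrying a port id, MAC address, and a nested network/subnet/IP structure. The stdlib-only sketch below pulls the fixed and floating addresses out of one such entry; the sample dict is abridged from the record for port e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55 above and the helper name is ad hoc.

# Abridged from the VIF entry for port e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55.
vif = {
    "id": "e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55",
    "address": "fa:16:3e:94:f4:ed",
    "type": "ovs",
    "network": {
        "id": "eb0d2ded-859d-46b3-843d-bb580d0bfb6b",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4},
            "ips": [
                {"address": "192.168.128.11", "type": "fixed", "version": 4,
                 "floating_ips": []},
                {"address": "192.168.128.6", "type": "fixed", "version": 4,
                 "floating_ips": [{"address": "10.180.180.192",
                                   "type": "floating", "version": 4}]},
            ],
        }],
    },
}

def summarize_vif(vif):
    """Return (mac, fixed_ips, floating_ips) for one network_info entry."""
    fixed, floating = [], []
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            fixed.append(ip["address"])
            floating.extend(fip["address"] for fip in ip.get("floating_ips", []))
    return vif["address"], fixed, floating

print(summarize_vif(vif))
# ('fa:16:3e:94:f4:ed', ['192.168.128.11', '192.168.128.6'], ['10.180.180.192'])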
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 808.001850] env[62522]: DEBUG nova.network.neutron [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 808.040422] env[62522]: DEBUG oslo_vmware.api [None req-d336415f-86f6-4a3f-b76f-51033b8ec6ca tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': task-2415452, 'name': ReconfigVM_Task, 'duration_secs': 0.163538} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.040699] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d336415f-86f6-4a3f-b76f-51033b8ec6ca tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Reconfigured VM instance to set the machine id {{(pid=62522) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 808.152871] env[62522]: DEBUG oslo_vmware.api [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415454, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15237} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.153268] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 808.153372] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 808.153509] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 808.153690] env[62522]: INFO nova.compute.manager [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Took 1.11 seconds to destroy the instance on the hypervisor. [ 808.153928] env[62522]: DEBUG oslo.service.loopingcall [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
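[editorial note] The oslo.service.loopingcall record above shows the compute manager blocking until _deallocate_network_with_retries returns. A plain-Python stand-in for that wait-with-retries pattern is sketched below; Nova drives this through oslo.service's looping-call machinery, so the function and variable names here are purely illustrative.

import time

def wait_with_retries(func, max_attempts=3, interval=0.5, log=print):
    """Call func until it succeeds or the attempts are exhausted, sleeping
    between attempts -- the behaviour the 'Waiting for function ... to
    return' record above describes, in simplified form."""
    for attempt in range(1, max_attempts + 1):
        try:
            return func()
        except Exception as exc:              # broad catch, illustrative only
            log(f"attempt {attempt}/{max_attempts} failed: {exc}")
            if attempt == max_attempts:
                raise
            time.sleep(interval)

if __name__ == '__main__':
    calls = {"n": 0}

    def deallocate_network():
        # Fails once, then succeeds -- stands in for a transient Neutron error.
        calls["n"] += 1
        if calls["n"] < 2:
            raise RuntimeError("transient neutron failure")
        return "deallocated"

    print(wait_with_retries(deallocate_network, interval=0.01))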
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 808.154147] env[62522]: DEBUG nova.compute.manager [-] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 808.154237] env[62522]: DEBUG nova.network.neutron [-] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 808.290591] env[62522]: DEBUG nova.objects.instance [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lazy-loading 'pci_requests' on Instance uuid 19d3d54c-5ba1-420f-b012-a08add8546c9 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 808.298650] env[62522]: INFO nova.compute.manager [-] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Took 1.26 seconds to deallocate network for instance. [ 808.305038] env[62522]: DEBUG nova.policy [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2915209e81104e9eb3f4446a9d85fd57', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3ca7e42d226a4ef6b48b882356da8950', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 808.308061] env[62522]: DEBUG oslo_concurrency.lockutils [req-2f90acad-b1f1-46f9-b733-d3e73405c14b req-1eb6d5e5-5c14-4599-a621-224acca6696c service nova] Releasing lock "refresh_cache-d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.505614] env[62522]: DEBUG nova.compute.manager [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 808.794550] env[62522]: DEBUG nova.objects.base [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Object Instance<19d3d54c-5ba1-420f-b012-a08add8546c9> lazy-loaded attributes: flavor,pci_requests {{(pid=62522) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 808.794550] env[62522]: DEBUG nova.network.neutron [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 808.808466] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.888040] env[62522]: DEBUG nova.policy [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab5e5a8e6ee64aad8d52342ee3f5af36', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4bdd1f5caf09454d808bcdc15df2d3a7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 808.927396] env[62522]: DEBUG nova.network.neutron [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Successfully created port: 3e725d96-bba9-4651-8fc1-70f66a94b0d1 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 809.090076] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f8187fd-e5ef-41fd-8a17-c84c1d865015 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.096041] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3f69e34a-7662-4c89-b479-97d83e8a226d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquiring lock "e813e7da-fd2c-4f10-b2f3-1e2b5c153a19" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.096539] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3f69e34a-7662-4c89-b479-97d83e8a226d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock "e813e7da-fd2c-4f10-b2f3-1e2b5c153a19" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.104130] env[62522]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9cd1f14-1aae-40d1-942c-2f2b857999e9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.139353] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904a5abb-df6c-424b-b301-be8e50d4160c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.148281] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64faac0f-b0e9-4a3e-96a9-6bb58f283a21 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.163816] env[62522]: DEBUG nova.compute.provider_tree [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 809.233723] env[62522]: DEBUG nova.network.neutron [-] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.465362] env[62522]: DEBUG nova.network.neutron [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Successfully created port: 6f83c77d-45cc-446e-8a38-eb8a94e38f59 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 809.518448] env[62522]: DEBUG nova.compute.manager [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 809.543745] env[62522]: DEBUG nova.virt.hardware [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 809.544026] env[62522]: DEBUG nova.virt.hardware [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 809.544193] env[62522]: DEBUG nova.virt.hardware [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 809.544378] env[62522]: DEBUG nova.virt.hardware [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 809.544527] env[62522]: DEBUG nova.virt.hardware [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 809.544677] env[62522]: DEBUG nova.virt.hardware [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 809.544890] env[62522]: DEBUG nova.virt.hardware [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 809.545194] env[62522]: DEBUG nova.virt.hardware [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 809.545415] env[62522]: DEBUG nova.virt.hardware [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 
tempest-ServersTestMultiNic-58410992-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 809.545587] env[62522]: DEBUG nova.virt.hardware [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 809.545758] env[62522]: DEBUG nova.virt.hardware [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 809.546664] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23284908-4fdc-43f1-9873-951c0a0e055d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.551414] env[62522]: DEBUG nova.network.neutron [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Successfully created port: ded8b2e3-ee39-454b-97d6-5001bdbb8f72 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 809.558351] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c67c4cd-7309-4603-9d71-4da4f0798b8d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.584748] env[62522]: DEBUG nova.compute.manager [req-08f867f3-85c9-4a88-a175-13891ed8d260 req-6ff2bd38-df29-4750-8c8e-fbd7d71a1aff service nova] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Received event network-vif-deleted-aa9e5bf2-d99c-44ec-8c21-4aa8866616e0 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 809.603935] env[62522]: INFO nova.compute.manager [None req-3f69e34a-7662-4c89-b479-97d83e8a226d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Detaching volume fad13b91-225f-48a3-bc96-43a13bc45587 [ 809.646101] env[62522]: INFO nova.virt.block_device [None req-3f69e34a-7662-4c89-b479-97d83e8a226d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Attempting to driver detach volume fad13b91-225f-48a3-bc96-43a13bc45587 from mountpoint /dev/sdb [ 809.646371] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f69e34a-7662-4c89-b479-97d83e8a226d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Volume detach. 
Driver type: vmdk {{(pid=62522) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 809.646604] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f69e34a-7662-4c89-b479-97d83e8a226d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489692', 'volume_id': 'fad13b91-225f-48a3-bc96-43a13bc45587', 'name': 'volume-fad13b91-225f-48a3-bc96-43a13bc45587', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e813e7da-fd2c-4f10-b2f3-1e2b5c153a19', 'attached_at': '', 'detached_at': '', 'volume_id': 'fad13b91-225f-48a3-bc96-43a13bc45587', 'serial': 'fad13b91-225f-48a3-bc96-43a13bc45587'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 809.647502] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1748449-5c0d-4d54-b765-48b0c1880f52 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.676085] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e2e597b-238b-4b21-bcbe-5d5374773eea {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.684044] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1bfe7b6-0e05-4feb-846f-83cbd8997e19 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.705973] env[62522]: ERROR nova.scheduler.client.report [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [req-0a5748be-73da-4e95-a3fa-7681e4e9a0b8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c7fa38b2-245d-4337-a012-22c1a01c0a72. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0a5748be-73da-4e95-a3fa-7681e4e9a0b8"}]} [ 809.706788] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11d63bd9-541b-4d33-aa04-1434272d4515 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.725395] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f69e34a-7662-4c89-b479-97d83e8a226d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] The volume has not been displaced from its original location: [datastore1] volume-fad13b91-225f-48a3-bc96-43a13bc45587/volume-fad13b91-225f-48a3-bc96-43a13bc45587.vmdk. No consolidation needed. 
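[editorial note] The nova.virt.hardware records a few lines above walk from "Build topologies for 1 vcpu(s) 1:1:1" to a single possible VirtCPUTopology(cores=1,sockets=1,threads=1). The underlying idea is enumerating the (sockets, cores, threads) factorizations of the vCPU count that fit under the flavor/image maxima; the sketch below is a simplified stand-in for that search, not Nova's implementation.

from itertools import product
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield every (sockets, cores, threads) whose product equals vcpus and
    whose members respect the given maxima -- the kind of enumeration behind
    the 'Got N possible topologies' record above, greatly simplified."""
    for sockets, cores, threads in product(range(1, min(vcpus, max_sockets) + 1),
                                           range(1, min(vcpus, max_cores) + 1),
                                           range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            yield VirtCPUTopology(sockets, cores, threads)

print(list(possible_topologies(1)))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]
print(list(possible_topologies(4, max_threads=1)))
# [(1, 4, 1), (2, 2, 1), (4, 1, 1)]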
{{(pid=62522) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 809.730659] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f69e34a-7662-4c89-b479-97d83e8a226d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Reconfiguring VM instance instance-00000022 to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 809.732013] env[62522]: DEBUG nova.scheduler.client.report [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Refreshing inventories for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 809.733941] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-272bf78e-3856-4908-835b-e4c7cd6ca163 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.746869] env[62522]: INFO nova.compute.manager [-] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Took 1.59 seconds to deallocate network for instance. [ 809.754777] env[62522]: DEBUG oslo_vmware.api [None req-3f69e34a-7662-4c89-b479-97d83e8a226d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for the task: (returnval){ [ 809.754777] env[62522]: value = "task-2415455" [ 809.754777] env[62522]: _type = "Task" [ 809.754777] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.763636] env[62522]: DEBUG oslo_vmware.api [None req-3f69e34a-7662-4c89-b479-97d83e8a226d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415455, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.764563] env[62522]: DEBUG nova.scheduler.client.report [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Updating ProviderTree inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 809.764772] env[62522]: DEBUG nova.compute.provider_tree [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 809.777184] env[62522]: DEBUG nova.scheduler.client.report [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Refreshing aggregate associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, aggregates: None {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 809.807901] env[62522]: DEBUG nova.scheduler.client.report [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Refreshing trait associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 810.001349] env[62522]: DEBUG nova.objects.instance [None req-428bf5e7-3b48-4b44-8cff-1c16c3fb5087 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Lazy-loading 'flavor' on Instance uuid d266aff3-42b4-4dcb-b8ca-7c13cdf8d314 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 810.080559] env[62522]: DEBUG nova.network.neutron [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Successfully created port: 30356a78-c3a1-4db1-8efa-ccd3f3e4afd1 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 810.260662] env[62522]: DEBUG oslo_concurrency.lockutils [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 810.266606] env[62522]: DEBUG oslo_vmware.api [None req-3f69e34a-7662-4c89-b479-97d83e8a226d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415455, 'name': ReconfigVM_Task, 'duration_secs': 0.235093} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.268915] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f69e34a-7662-4c89-b479-97d83e8a226d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Reconfigured VM instance instance-00000022 to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 810.278232] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad93dabc-2bdb-40cb-a89f-a00dbf2e012d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.300692] env[62522]: DEBUG oslo_vmware.api [None req-3f69e34a-7662-4c89-b479-97d83e8a226d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for the task: (returnval){ [ 810.300692] env[62522]: value = "task-2415456" [ 810.300692] env[62522]: _type = "Task" [ 810.300692] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.313610] env[62522]: DEBUG oslo_vmware.api [None req-3f69e34a-7662-4c89-b479-97d83e8a226d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415456, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.349024] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41bd03eb-efbd-4eb2-bb4e-72f220c637ef {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.356852] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09be2485-a5d2-4343-a806-da7a2e7f2ec9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.388735] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b91f3c-ee10-4426-8122-aa79a8e4f030 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.396418] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75041b4b-155d-435e-8e56-48c242fad468 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.409270] env[62522]: DEBUG nova.compute.provider_tree [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 810.506430] env[62522]: DEBUG oslo_concurrency.lockutils [None req-428bf5e7-3b48-4b44-8cff-1c16c3fb5087 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Acquiring lock "refresh_cache-d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.506619] env[62522]: DEBUG oslo_concurrency.lockutils [None req-428bf5e7-3b48-4b44-8cff-1c16c3fb5087 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Acquired lock "refresh_cache-d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.812754] env[62522]: DEBUG oslo_vmware.api [None req-3f69e34a-7662-4c89-b479-97d83e8a226d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415456, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.866550] env[62522]: DEBUG nova.network.neutron [None req-428bf5e7-3b48-4b44-8cff-1c16c3fb5087 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 810.953503] env[62522]: DEBUG nova.scheduler.client.report [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Updated inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with generation 76 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 810.953774] env[62522]: DEBUG nova.compute.provider_tree [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Updating resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 generation from 76 to 77 during operation: update_inventory {{(pid=62522) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 810.953926] env[62522]: DEBUG nova.compute.provider_tree [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 811.303875] env[62522]: DEBUG nova.network.neutron [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Successfully updated port: 6f83c77d-45cc-446e-8a38-eb8a94e38f59 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 811.315304] env[62522]: DEBUG oslo_vmware.api [None req-3f69e34a-7662-4c89-b479-97d83e8a226d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415456, 'name': ReconfigVM_Task, 'duration_secs': 0.748576} completed successfully. 
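[editorial note] The scheduler-report records above show Placement's optimistic-concurrency dance: the inventory PUT carries the resource-provider generation it was computed against, a stale generation earns the 409 "placement.concurrent_update" seen earlier, and the client re-reads the inventory (picking up the new generation, 76 to 77 here) before retrying. Below is a hedged sketch of that loop using requests; the microversion header value and the token/endpoint handling are simplified assumptions, not Nova's report client.

import requests

def set_inventory(placement_url, token, provider_uuid, inventories, max_retries=3):
    """PUT the inventory for a resource provider, retrying on a generation
    conflict (HTTP 409, placement.concurrent_update) after re-reading the
    current generation -- a simplified version of the flow in the log above."""
    headers = {
        'X-Auth-Token': token,
        'OpenStack-API-Version': 'placement 1.26',   # assumed microversion
    }
    base = f'{placement_url}/resource_providers/{provider_uuid}/inventories'
    for _ in range(max_retries):
        # Read the generation this update must be based on.
        current = requests.get(base, headers=headers)
        current.raise_for_status()
        generation = current.json()['resource_provider_generation']

        resp = requests.put(base, headers=headers, json={
            'resource_provider_generation': generation,
            'inventories': inventories,
        })
        if resp.status_code == 200:
            return resp.json()['resource_provider_generation']
        if resp.status_code != 409:
            resp.raise_for_status()
        # 409: another writer bumped the generation; loop and re-read it.
    raise RuntimeError('gave up after repeated placement.concurrent_update conflicts')

The retry is cheap because the conflict only signals that the provider's generation moved, exactly what the "Refreshing inventories for resource provider ..." records show the real client doing before its second, successful attempt.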
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.315603] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f69e34a-7662-4c89-b479-97d83e8a226d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489692', 'volume_id': 'fad13b91-225f-48a3-bc96-43a13bc45587', 'name': 'volume-fad13b91-225f-48a3-bc96-43a13bc45587', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e813e7da-fd2c-4f10-b2f3-1e2b5c153a19', 'attached_at': '', 'detached_at': '', 'volume_id': 'fad13b91-225f-48a3-bc96-43a13bc45587', 'serial': 'fad13b91-225f-48a3-bc96-43a13bc45587'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 811.461271] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.971s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.461943] env[62522]: DEBUG nova.compute.manager [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 811.465185] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.006s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.466646] env[62522]: INFO nova.compute.claims [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 811.793291] env[62522]: DEBUG nova.network.neutron [None req-428bf5e7-3b48-4b44-8cff-1c16c3fb5087 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Updating instance_info_cache with network_info: [{"id": "e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55", "address": "fa:16:3e:94:f4:ed", "network": {"id": "eb0d2ded-859d-46b3-843d-bb580d0bfb6b", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-831675108-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dba307f1fbf48bfac98d9836a72254e", 
"mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0fac98f-bf", "ovs_interfaceid": "e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.810715] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "refresh_cache-19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 811.812712] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "refresh_cache-19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.812712] env[62522]: DEBUG nova.network.neutron [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 811.865463] env[62522]: DEBUG nova.objects.instance [None req-3f69e34a-7662-4c89-b479-97d83e8a226d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lazy-loading 'flavor' on Instance uuid e813e7da-fd2c-4f10-b2f3-1e2b5c153a19 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 811.893670] env[62522]: DEBUG nova.network.neutron [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Successfully updated port: 3e725d96-bba9-4651-8fc1-70f66a94b0d1 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 811.972845] env[62522]: DEBUG nova.compute.utils [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 811.974444] env[62522]: DEBUG nova.compute.manager [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 811.974444] env[62522]: DEBUG nova.network.neutron [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 812.017478] env[62522]: DEBUG nova.policy [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f23d9647c4874ea0b3e6a6abf9d6202f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '686854cd52ce4809a4d315275260da54', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 812.144834] env[62522]: DEBUG nova.compute.manager [req-1565b9ad-7a4e-45ad-afb5-105fddb694e9 req-03502005-48df-4297-84a2-f188bade8ca0 service nova] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Received event network-vif-deleted-b2c9a42e-9c2a-40c7-9e58-882c7a354b9b {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 812.145455] env[62522]: DEBUG nova.compute.manager [req-1565b9ad-7a4e-45ad-afb5-105fddb694e9 req-03502005-48df-4297-84a2-f188bade8ca0 service nova] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Received event network-changed-e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 812.145455] env[62522]: DEBUG nova.compute.manager [req-1565b9ad-7a4e-45ad-afb5-105fddb694e9 req-03502005-48df-4297-84a2-f188bade8ca0 service nova] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Refreshing instance network info cache due to event network-changed-e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55. 
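[editorial note] The service-originated events above ("network-vif-deleted-<port>", "network-changed-<port>") reach the compute manager as named external instance events: the name encodes the event type plus the port it concerns, and the network-changed handler refreshes the cached network info for that port. A tiny dispatch sketch follows; the handler bodies and names are made up for illustration, only the event-name scheme is taken from the log.

def parse_event(name):
    """Split an external-event name like 'network-changed-<port-id>' into
    (event_type, port_id); the naming scheme matches the log records."""
    for event_type in ('network-vif-deleted', 'network-changed', 'network-vif-plugged'):
        prefix = event_type + '-'
        if name.startswith(prefix):
            return event_type, name[len(prefix):]
    raise ValueError(f'unrecognized external event: {name}')

def handle_event(name, handlers):
    event_type, port_id = parse_event(name)
    return handlers[event_type](port_id)

# Illustrative handlers only -- not the nova.compute.manager callbacks.
handlers = {
    'network-changed': lambda port: f'refresh network info cache for port {port}',
    'network-vif-deleted': lambda port: f'forget VIF {port}',
    'network-vif-plugged': lambda port: f'mark VIF {port} as plugged',
}

print(handle_event('network-changed-e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55', handlers))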
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 812.145455] env[62522]: DEBUG oslo_concurrency.lockutils [req-1565b9ad-7a4e-45ad-afb5-105fddb694e9 req-03502005-48df-4297-84a2-f188bade8ca0 service nova] Acquiring lock "refresh_cache-d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 812.297401] env[62522]: DEBUG oslo_concurrency.lockutils [None req-428bf5e7-3b48-4b44-8cff-1c16c3fb5087 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Releasing lock "refresh_cache-d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.297659] env[62522]: DEBUG nova.compute.manager [None req-428bf5e7-3b48-4b44-8cff-1c16c3fb5087 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Inject network info {{(pid=62522) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 812.298593] env[62522]: DEBUG nova.compute.manager [None req-428bf5e7-3b48-4b44-8cff-1c16c3fb5087 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] network_info to inject: |[{"id": "e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55", "address": "fa:16:3e:94:f4:ed", "network": {"id": "eb0d2ded-859d-46b3-843d-bb580d0bfb6b", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-831675108-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dba307f1fbf48bfac98d9836a72254e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0fac98f-bf", "ovs_interfaceid": "e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 812.304221] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-428bf5e7-3b48-4b44-8cff-1c16c3fb5087 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Reconfiguring VM instance to set the machine id {{(pid=62522) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 812.304221] env[62522]: DEBUG oslo_concurrency.lockutils [req-1565b9ad-7a4e-45ad-afb5-105fddb694e9 req-03502005-48df-4297-84a2-f188bade8ca0 service nova] Acquired lock "refresh_cache-d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.304221] 
env[62522]: DEBUG nova.network.neutron [req-1565b9ad-7a4e-45ad-afb5-105fddb694e9 req-03502005-48df-4297-84a2-f188bade8ca0 service nova] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Refreshing network info cache for port e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 812.304221] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-624f527d-4628-41e2-b98e-bae185d4f113 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.324050] env[62522]: DEBUG oslo_vmware.api [None req-428bf5e7-3b48-4b44-8cff-1c16c3fb5087 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Waiting for the task: (returnval){ [ 812.324050] env[62522]: value = "task-2415457" [ 812.324050] env[62522]: _type = "Task" [ 812.324050] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.333047] env[62522]: DEBUG oslo_vmware.api [None req-428bf5e7-3b48-4b44-8cff-1c16c3fb5087 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': task-2415457, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.386591] env[62522]: WARNING nova.network.neutron [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] be0fe686-4986-439e-aa82-5cbe54104c8a already exists in list: networks containing: ['be0fe686-4986-439e-aa82-5cbe54104c8a']. ignoring it [ 812.419609] env[62522]: DEBUG nova.network.neutron [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Successfully created port: 1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 812.480569] env[62522]: DEBUG nova.compute.manager [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 812.839900] env[62522]: DEBUG oslo_vmware.api [None req-428bf5e7-3b48-4b44-8cff-1c16c3fb5087 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': task-2415457, 'name': ReconfigVM_Task, 'duration_secs': 0.137359} completed successfully. 
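[editorial note] The lockutils records just below (and throughout this section) -- 'Acquiring lock "..." by "..."', 'acquired ... waited N s', '"released" ... held N s' -- come from oslo.concurrency's named-lock wrapper around critical sections such as do_detach_volume and do_terminate_instance. The stdlib-only imitation below reproduces the acquire/waited/held accounting; it illustrates the pattern and is not the oslo.concurrency implementation.

import threading
import time
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)   # one named lock per resource, process-local

@contextmanager
def named_lock(name, owner, log=print):
    """Serialize access to a named resource and report how long the caller
    waited for the lock and how long it was held, in the style of the
    lockutils records in this log."""
    log(f'Acquiring lock "{name}" by "{owner}"')
    t0 = time.monotonic()
    with _locks[name]:
        waited = time.monotonic() - t0
        log(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - t1
            log(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

if __name__ == '__main__':
    with named_lock('compute_resources', 'ResourceTracker.update_usage'):
        time.sleep(0.05)   # stand-in for the tracked critical section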
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.840179] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-428bf5e7-3b48-4b44-8cff-1c16c3fb5087 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Reconfigured VM instance to set the machine id {{(pid=62522) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 812.877039] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3f69e34a-7662-4c89-b479-97d83e8a226d tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock "e813e7da-fd2c-4f10-b2f3-1e2b5c153a19" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.778s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.991563] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Acquiring lock "d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.991829] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Lock "d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.992040] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Acquiring lock "d266aff3-42b4-4dcb-b8ca-7c13cdf8d314-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.992229] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Lock "d266aff3-42b4-4dcb-b8ca-7c13cdf8d314-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.992397] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Lock "d266aff3-42b4-4dcb-b8ca-7c13cdf8d314-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.995767] env[62522]: INFO nova.compute.manager [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] 
[instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Terminating instance [ 813.064082] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54444bcf-c7e9-4d19-893e-d22bb4a15da7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.072052] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a2a1f57-56d2-4a4e-bb8f-e0229b1c25a7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.102761] env[62522]: DEBUG nova.network.neutron [req-1565b9ad-7a4e-45ad-afb5-105fddb694e9 req-03502005-48df-4297-84a2-f188bade8ca0 service nova] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Updated VIF entry in instance network info cache for port e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 813.103117] env[62522]: DEBUG nova.network.neutron [req-1565b9ad-7a4e-45ad-afb5-105fddb694e9 req-03502005-48df-4297-84a2-f188bade8ca0 service nova] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Updating instance_info_cache with network_info: [{"id": "e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55", "address": "fa:16:3e:94:f4:ed", "network": {"id": "eb0d2ded-859d-46b3-843d-bb580d0bfb6b", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-831675108-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dba307f1fbf48bfac98d9836a72254e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0fac98f-bf", "ovs_interfaceid": "e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.104896] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0a9f13-1a97-490e-a3b9-60d73c24d17f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.111586] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c395d38-3a1a-45f0-85e8-ce560f51e3fa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.125777] env[62522]: DEBUG nova.compute.provider_tree [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 813.388717] env[62522]: 
DEBUG nova.network.neutron [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Updating instance_info_cache with network_info: [{"id": "bb09cad6-a323-4801-8cb8-7e58b646a38e", "address": "fa:16:3e:c2:3c:c7", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb09cad6-a3", "ovs_interfaceid": "bb09cad6-a323-4801-8cb8-7e58b646a38e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6f83c77d-45cc-446e-8a38-eb8a94e38f59", "address": "fa:16:3e:ce:74:67", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f83c77d-45", "ovs_interfaceid": "6f83c77d-45cc-446e-8a38-eb8a94e38f59", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.495304] env[62522]: DEBUG nova.compute.manager [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 813.501756] env[62522]: DEBUG nova.compute.manager [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 813.501981] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 813.502875] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1675ecc3-f34c-4478-8be1-57a1253809cc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.511281] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 813.511537] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-72a6d089-8208-4777-9704-b833afda937b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.520561] env[62522]: DEBUG nova.virt.hardware [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 813.520792] env[62522]: DEBUG nova.virt.hardware [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 813.520951] env[62522]: DEBUG nova.virt.hardware [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 813.521151] env[62522]: DEBUG nova.virt.hardware [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 813.521303] env[62522]: DEBUG nova.virt.hardware [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Image pref 0:0:0 
{{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 813.521452] env[62522]: DEBUG nova.virt.hardware [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 813.521664] env[62522]: DEBUG nova.virt.hardware [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 813.521827] env[62522]: DEBUG nova.virt.hardware [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 813.521995] env[62522]: DEBUG nova.virt.hardware [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 813.522230] env[62522]: DEBUG nova.virt.hardware [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 813.522363] env[62522]: DEBUG nova.virt.hardware [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 813.523213] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02102c92-9149-4219-a940-49445255e03c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.527406] env[62522]: DEBUG oslo_vmware.api [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Waiting for the task: (returnval){ [ 813.527406] env[62522]: value = "task-2415458" [ 813.527406] env[62522]: _type = "Task" [ 813.527406] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.533756] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5584ebdc-f7ab-4866-adb5-d992aa5993c0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.541215] env[62522]: DEBUG oslo_vmware.api [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': task-2415458, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.608695] env[62522]: DEBUG oslo_concurrency.lockutils [req-1565b9ad-7a4e-45ad-afb5-105fddb694e9 req-03502005-48df-4297-84a2-f188bade8ca0 service nova] Releasing lock "refresh_cache-d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 813.608993] env[62522]: DEBUG nova.compute.manager [req-1565b9ad-7a4e-45ad-afb5-105fddb694e9 req-03502005-48df-4297-84a2-f188bade8ca0 service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Received event network-vif-plugged-6f83c77d-45cc-446e-8a38-eb8a94e38f59 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 813.609220] env[62522]: DEBUG oslo_concurrency.lockutils [req-1565b9ad-7a4e-45ad-afb5-105fddb694e9 req-03502005-48df-4297-84a2-f188bade8ca0 service nova] Acquiring lock "19d3d54c-5ba1-420f-b012-a08add8546c9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 813.609498] env[62522]: DEBUG oslo_concurrency.lockutils [req-1565b9ad-7a4e-45ad-afb5-105fddb694e9 req-03502005-48df-4297-84a2-f188bade8ca0 service nova] Lock "19d3d54c-5ba1-420f-b012-a08add8546c9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.609671] env[62522]: DEBUG oslo_concurrency.lockutils [req-1565b9ad-7a4e-45ad-afb5-105fddb694e9 req-03502005-48df-4297-84a2-f188bade8ca0 service nova] Lock "19d3d54c-5ba1-420f-b012-a08add8546c9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.609844] env[62522]: DEBUG nova.compute.manager [req-1565b9ad-7a4e-45ad-afb5-105fddb694e9 req-03502005-48df-4297-84a2-f188bade8ca0 service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] No waiting events found dispatching network-vif-plugged-6f83c77d-45cc-446e-8a38-eb8a94e38f59 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 813.610029] env[62522]: WARNING nova.compute.manager [req-1565b9ad-7a4e-45ad-afb5-105fddb694e9 req-03502005-48df-4297-84a2-f188bade8ca0 service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Received unexpected event network-vif-plugged-6f83c77d-45cc-446e-8a38-eb8a94e38f59 for instance with vm_state active and task_state None. [ 813.610201] env[62522]: DEBUG nova.compute.manager [req-1565b9ad-7a4e-45ad-afb5-105fddb694e9 req-03502005-48df-4297-84a2-f188bade8ca0 service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Received event network-changed-6f83c77d-45cc-446e-8a38-eb8a94e38f59 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 813.610379] env[62522]: DEBUG nova.compute.manager [req-1565b9ad-7a4e-45ad-afb5-105fddb694e9 req-03502005-48df-4297-84a2-f188bade8ca0 service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Refreshing instance network info cache due to event network-changed-6f83c77d-45cc-446e-8a38-eb8a94e38f59. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 813.610559] env[62522]: DEBUG oslo_concurrency.lockutils [req-1565b9ad-7a4e-45ad-afb5-105fddb694e9 req-03502005-48df-4297-84a2-f188bade8ca0 service nova] Acquiring lock "refresh_cache-19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 813.628642] env[62522]: DEBUG nova.scheduler.client.report [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 813.890979] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "refresh_cache-19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 813.891876] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 813.892146] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.892543] env[62522]: DEBUG oslo_concurrency.lockutils [req-1565b9ad-7a4e-45ad-afb5-105fddb694e9 req-03502005-48df-4297-84a2-f188bade8ca0 service nova] Acquired lock "refresh_cache-19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.892801] env[62522]: DEBUG nova.network.neutron [req-1565b9ad-7a4e-45ad-afb5-105fddb694e9 req-03502005-48df-4297-84a2-f188bade8ca0 service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Refreshing network info cache for port 6f83c77d-45cc-446e-8a38-eb8a94e38f59 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 813.896597] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de62bc38-cff8-4b18-bf83-b05928785d6f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.915178] env[62522]: DEBUG nova.virt.hardware [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 813.915584] env[62522]: DEBUG nova.virt.hardware [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 813.916039] env[62522]: DEBUG nova.virt.hardware [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 813.916338] env[62522]: DEBUG nova.virt.hardware [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 813.916605] env[62522]: DEBUG nova.virt.hardware [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 813.916868] env[62522]: DEBUG nova.virt.hardware [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 813.917225] env[62522]: DEBUG nova.virt.hardware [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 813.917540] env[62522]: DEBUG nova.virt.hardware [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 813.917849] env[62522]: DEBUG nova.virt.hardware [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 813.918151] env[62522]: DEBUG nova.virt.hardware [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 813.918460] env[62522]: DEBUG nova.virt.hardware [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 813.924847] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Reconfiguring VM to attach interface {{(pid=62522) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 813.925509] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-338f4637-d340-4f03-b6da-fd4242251df5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.944742] env[62522]: DEBUG oslo_vmware.api [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 813.944742] env[62522]: value = "task-2415459" [ 813.944742] env[62522]: _type = "Task" [ 813.944742] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.953497] env[62522]: DEBUG oslo_vmware.api [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415459, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.038066] env[62522]: DEBUG oslo_vmware.api [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': task-2415458, 'name': PowerOffVM_Task, 'duration_secs': 0.197325} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.038066] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 814.038066] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 814.038370] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-421596e0-fde0-4d18-8160-87181ca7591d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.100177] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 814.100580] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 814.100830] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Deleting the datastore file [datastore2] d266aff3-42b4-4dcb-b8ca-7c13cdf8d314 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 814.101205] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-12b829f7-ff65-41af-9e57-1291faab7f95 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.108436] env[62522]: DEBUG oslo_vmware.api [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Waiting for the task: (returnval){ [ 814.108436] env[62522]: value = "task-2415461" [ 814.108436] env[62522]: _type = "Task" [ 814.108436] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.117100] env[62522]: DEBUG oslo_vmware.api [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': task-2415461, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.134153] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.669s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.134741] env[62522]: DEBUG nova.compute.manager [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 814.138381] env[62522]: DEBUG oslo_concurrency.lockutils [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.141s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.139612] env[62522]: INFO nova.compute.claims [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 814.456476] env[62522]: DEBUG oslo_vmware.api [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415459, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.618531] env[62522]: DEBUG oslo_vmware.api [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Task: {'id': task-2415461, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144127} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.618738] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 814.618901] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 814.619094] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 814.619269] env[62522]: INFO nova.compute.manager [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Took 1.12 seconds to destroy the instance on the hypervisor. [ 814.619547] env[62522]: DEBUG oslo.service.loopingcall [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 814.619741] env[62522]: DEBUG nova.compute.manager [-] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 814.619838] env[62522]: DEBUG nova.network.neutron [-] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 814.647994] env[62522]: DEBUG nova.compute.utils [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 814.649508] env[62522]: DEBUG nova.compute.manager [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 814.649699] env[62522]: DEBUG nova.network.neutron [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 814.761736] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquiring lock "043a0a1b-268c-4caa-b1f7-cc7d70c3b314" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.761970] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock "043a0a1b-268c-4caa-b1f7-cc7d70c3b314" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.869184] env[62522]: DEBUG nova.policy [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0ab2b427a36245a1aff7d54759b5c660', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '261b19c87c79499abc8747804f1e04c9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 814.961866] env[62522]: DEBUG oslo_vmware.api [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415459, 'name': ReconfigVM_Task, 'duration_secs': 0.568355} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.962848] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.963089] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Reconfigured VM to attach interface {{(pid=62522) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 814.984578] env[62522]: DEBUG nova.network.neutron [req-1565b9ad-7a4e-45ad-afb5-105fddb694e9 req-03502005-48df-4297-84a2-f188bade8ca0 service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Updated VIF entry in instance network info cache for port 6f83c77d-45cc-446e-8a38-eb8a94e38f59. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 814.985109] env[62522]: DEBUG nova.network.neutron [req-1565b9ad-7a4e-45ad-afb5-105fddb694e9 req-03502005-48df-4297-84a2-f188bade8ca0 service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Updating instance_info_cache with network_info: [{"id": "bb09cad6-a323-4801-8cb8-7e58b646a38e", "address": "fa:16:3e:c2:3c:c7", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb09cad6-a3", "ovs_interfaceid": "bb09cad6-a323-4801-8cb8-7e58b646a38e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6f83c77d-45cc-446e-8a38-eb8a94e38f59", "address": "fa:16:3e:ce:74:67", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f83c77d-45", "ovs_interfaceid": "6f83c77d-45cc-446e-8a38-eb8a94e38f59", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.096550] env[62522]: DEBUG nova.compute.manager [req-60c1e008-7822-46c1-bde9-a4b80c2073f5 req-e5167310-68d4-4a90-9819-c46ac42ab414 service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Received event network-vif-plugged-3e725d96-bba9-4651-8fc1-70f66a94b0d1 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 815.096778] env[62522]: DEBUG oslo_concurrency.lockutils [req-60c1e008-7822-46c1-bde9-a4b80c2073f5 req-e5167310-68d4-4a90-9819-c46ac42ab414 service nova] Acquiring lock "a185273e-cdaf-4967-832b-f75014b7b3f4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.096994] env[62522]: DEBUG oslo_concurrency.lockutils [req-60c1e008-7822-46c1-bde9-a4b80c2073f5 req-e5167310-68d4-4a90-9819-c46ac42ab414 service nova] Lock "a185273e-cdaf-4967-832b-f75014b7b3f4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.105431] env[62522]: DEBUG oslo_concurrency.lockutils [req-60c1e008-7822-46c1-bde9-a4b80c2073f5 req-e5167310-68d4-4a90-9819-c46ac42ab414 service nova] Lock "a185273e-cdaf-4967-832b-f75014b7b3f4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.008s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.105665] env[62522]: DEBUG nova.compute.manager [req-60c1e008-7822-46c1-bde9-a4b80c2073f5 req-e5167310-68d4-4a90-9819-c46ac42ab414 service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] No waiting events found dispatching network-vif-plugged-3e725d96-bba9-4651-8fc1-70f66a94b0d1 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 815.105848] env[62522]: WARNING nova.compute.manager [req-60c1e008-7822-46c1-bde9-a4b80c2073f5 req-e5167310-68d4-4a90-9819-c46ac42ab414 service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Received unexpected event network-vif-plugged-3e725d96-bba9-4651-8fc1-70f66a94b0d1 for instance with vm_state building and task_state spawning. [ 815.106029] env[62522]: DEBUG nova.compute.manager [req-60c1e008-7822-46c1-bde9-a4b80c2073f5 req-e5167310-68d4-4a90-9819-c46ac42ab414 service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Received event network-changed-3e725d96-bba9-4651-8fc1-70f66a94b0d1 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 815.106199] env[62522]: DEBUG nova.compute.manager [req-60c1e008-7822-46c1-bde9-a4b80c2073f5 req-e5167310-68d4-4a90-9819-c46ac42ab414 service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Refreshing instance network info cache due to event network-changed-3e725d96-bba9-4651-8fc1-70f66a94b0d1. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 815.106404] env[62522]: DEBUG oslo_concurrency.lockutils [req-60c1e008-7822-46c1-bde9-a4b80c2073f5 req-e5167310-68d4-4a90-9819-c46ac42ab414 service nova] Acquiring lock "refresh_cache-a185273e-cdaf-4967-832b-f75014b7b3f4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 815.106544] env[62522]: DEBUG oslo_concurrency.lockutils [req-60c1e008-7822-46c1-bde9-a4b80c2073f5 req-e5167310-68d4-4a90-9819-c46ac42ab414 service nova] Acquired lock "refresh_cache-a185273e-cdaf-4967-832b-f75014b7b3f4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.106702] env[62522]: DEBUG nova.network.neutron [req-60c1e008-7822-46c1-bde9-a4b80c2073f5 req-e5167310-68d4-4a90-9819-c46ac42ab414 service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Refreshing network info cache for port 3e725d96-bba9-4651-8fc1-70f66a94b0d1 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 815.112290] env[62522]: DEBUG nova.network.neutron [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Successfully updated port: ded8b2e3-ee39-454b-97d6-5001bdbb8f72 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 815.125169] env[62522]: DEBUG nova.network.neutron [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Successfully updated port: 1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 815.155045] env[62522]: DEBUG nova.compute.manager [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 815.430880] env[62522]: DEBUG nova.network.neutron [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Successfully created port: 119d9006-8624-413c-94e2-a9ed9cbba8cb {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 815.468015] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccd4a3a0-00da-47ad-8065-0b0119d28781 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "interface-19d3d54c-5ba1-420f-b012-a08add8546c9-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.025s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.488196] env[62522]: DEBUG oslo_concurrency.lockutils [req-1565b9ad-7a4e-45ad-afb5-105fddb694e9 req-03502005-48df-4297-84a2-f188bade8ca0 service nova] Releasing lock "refresh_cache-19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 815.628348] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "refresh_cache-3c4c395c-0625-4569-990d-e2d4ad162c14" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 815.628490] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquired lock "refresh_cache-3c4c395c-0625-4569-990d-e2d4ad162c14" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.628641] env[62522]: DEBUG nova.network.neutron [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 815.722355] env[62522]: DEBUG nova.network.neutron [req-60c1e008-7822-46c1-bde9-a4b80c2073f5 req-e5167310-68d4-4a90-9819-c46ac42ab414 service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 815.805141] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d234153-720d-483e-a4fe-1727e121d2d5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.815602] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bd0b36c-70cc-490d-85d9-1ff448666854 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.853495] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7745d0a2-2629-4cdf-bfb4-1d714bf0a15f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.862112] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb5c9d2-191e-4933-b6f0-cadebb332230 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.877936] env[62522]: DEBUG nova.compute.provider_tree [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 815.933436] env[62522]: DEBUG nova.network.neutron [req-60c1e008-7822-46c1-bde9-a4b80c2073f5 req-e5167310-68d4-4a90-9819-c46ac42ab414 service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.124467] env[62522]: DEBUG nova.network.neutron [-] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.169441] env[62522]: DEBUG nova.compute.manager [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 816.172077] env[62522]: DEBUG nova.network.neutron [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 816.200008] env[62522]: DEBUG nova.virt.hardware [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 816.200280] env[62522]: DEBUG nova.virt.hardware [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 816.200467] env[62522]: DEBUG nova.virt.hardware [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 816.200751] env[62522]: DEBUG nova.virt.hardware [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 816.200842] env[62522]: DEBUG nova.virt.hardware [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 816.200936] env[62522]: DEBUG nova.virt.hardware [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 816.201224] env[62522]: DEBUG nova.virt.hardware [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 816.202115] env[62522]: DEBUG nova.virt.hardware [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 816.202115] env[62522]: DEBUG 
nova.virt.hardware [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 816.202115] env[62522]: DEBUG nova.virt.hardware [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 816.202115] env[62522]: DEBUG nova.virt.hardware [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 816.203065] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc99ab4-c9e7-46a2-85d6-532e69d0b7d7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.211437] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e4afff-3085-44b6-91db-cdb4d6d6666d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.351240] env[62522]: DEBUG nova.network.neutron [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Updating instance_info_cache with network_info: [{"id": "1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f", "address": "fa:16:3e:41:5d:d8", "network": {"id": "c3450427-ea7e-4a07-8399-53265d390e06", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1613138323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "686854cd52ce4809a4d315275260da54", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c42bb08a-77b4-4bba-8166-702cbb1b5f1e", "external-id": "nsx-vlan-transportzone-137", "segmentation_id": 137, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b7d6d1b-0d", "ovs_interfaceid": "1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.382093] env[62522]: DEBUG nova.scheduler.client.report [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 816.436510] env[62522]: DEBUG oslo_concurrency.lockutils [req-60c1e008-7822-46c1-bde9-a4b80c2073f5 req-e5167310-68d4-4a90-9819-c46ac42ab414 service nova] Releasing lock "refresh_cache-a185273e-cdaf-4967-832b-f75014b7b3f4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.519344] env[62522]: DEBUG nova.compute.manager [req-01c6fc20-73c6-448b-9b8b-5b11d2e18f27 req-455af55d-71f0-46d2-9144-3e16c77dcf77 service nova] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Received event network-vif-deleted-e0fac98f-bf02-4f4f-a4ee-97e1dbd85a55 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 816.628213] env[62522]: INFO nova.compute.manager [-] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Took 2.01 seconds to deallocate network for instance. [ 816.856468] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Releasing lock "refresh_cache-3c4c395c-0625-4569-990d-e2d4ad162c14" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.856983] env[62522]: DEBUG nova.compute.manager [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Instance network_info: |[{"id": "1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f", "address": "fa:16:3e:41:5d:d8", "network": {"id": "c3450427-ea7e-4a07-8399-53265d390e06", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1613138323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "686854cd52ce4809a4d315275260da54", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c42bb08a-77b4-4bba-8166-702cbb1b5f1e", "external-id": "nsx-vlan-transportzone-137", "segmentation_id": 137, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b7d6d1b-0d", "ovs_interfaceid": "1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 816.860238] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:5d:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c42bb08a-77b4-4bba-8166-702cbb1b5f1e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': 
True}, 'iface_id': '1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 816.869514] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Creating folder: Project (686854cd52ce4809a4d315275260da54). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 816.869831] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d9294d0a-71da-40a8-a724-8762db6d2543 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.881257] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Created folder: Project (686854cd52ce4809a4d315275260da54) in parent group-v489562. [ 816.882031] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Creating folder: Instances. Parent ref: group-v489695. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 816.882031] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-28d7a461-d72c-48dc-9da4-8e558556f65f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.887646] env[62522]: DEBUG oslo_concurrency.lockutils [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.749s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.888161] env[62522]: DEBUG nova.compute.manager [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 816.891848] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.685s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.893314] env[62522]: INFO nova.compute.claims [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 816.897449] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Created folder: Instances in parent group-v489695. 
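(Note: the _get_desirable_cpu_topologies records above reduce to a small piece of arithmetic: with no flavor or image limits or preferences (all logged as 0:0:0, i.e. "unset"), 1 vCPU, and per-dimension maxima of 65536, the only candidate split is 1 socket x 1 core x 1 thread. A minimal, self-contained sketch of that enumeration follows; it is illustrative only and not Nova's actual implementation — the helper name and namedtuple are assumptions.)

# Hypothetical sketch of the topology enumeration shown in the log records above.
# Input values mirror the log: vcpus=1, maxima sockets=65536, cores=65536, threads=65536.
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate every (sockets, cores, threads) split whose product equals vcpus
    # and which stays within the per-dimension maxima.
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    yield VirtCPUTopology(s, c, t)

print(list(possible_topologies(1)))
# -> [VirtCPUTopology(sockets=1, cores=1, threads=1)], matching "Got 1 possible topologies"
# and "Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" above.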
[ 816.897449] env[62522]: DEBUG oslo.service.loopingcall [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 816.897449] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 816.897449] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dfc547d9-460b-4b72-9c92-8e8aed1abdbb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.921335] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 816.921335] env[62522]: value = "task-2415464" [ 816.921335] env[62522]: _type = "Task" [ 816.921335] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.929201] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415464, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.033401] env[62522]: DEBUG nova.network.neutron [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Successfully updated port: 119d9006-8624-413c-94e2-a9ed9cbba8cb {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 817.134650] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.149151] env[62522]: DEBUG nova.compute.manager [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Received event network-vif-plugged-ded8b2e3-ee39-454b-97d6-5001bdbb8f72 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 817.149557] env[62522]: DEBUG oslo_concurrency.lockutils [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] Acquiring lock "a185273e-cdaf-4967-832b-f75014b7b3f4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.150042] env[62522]: DEBUG oslo_concurrency.lockutils [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] Lock "a185273e-cdaf-4967-832b-f75014b7b3f4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.150388] env[62522]: DEBUG oslo_concurrency.lockutils [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] Lock 
"a185273e-cdaf-4967-832b-f75014b7b3f4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.150745] env[62522]: DEBUG nova.compute.manager [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] No waiting events found dispatching network-vif-plugged-ded8b2e3-ee39-454b-97d6-5001bdbb8f72 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 817.151333] env[62522]: WARNING nova.compute.manager [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Received unexpected event network-vif-plugged-ded8b2e3-ee39-454b-97d6-5001bdbb8f72 for instance with vm_state building and task_state spawning. [ 817.151882] env[62522]: DEBUG nova.compute.manager [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Received event network-vif-plugged-1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 817.152184] env[62522]: DEBUG oslo_concurrency.lockutils [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] Acquiring lock "3c4c395c-0625-4569-990d-e2d4ad162c14-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.152492] env[62522]: DEBUG oslo_concurrency.lockutils [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] Lock "3c4c395c-0625-4569-990d-e2d4ad162c14-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.152768] env[62522]: DEBUG oslo_concurrency.lockutils [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] Lock "3c4c395c-0625-4569-990d-e2d4ad162c14-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.153047] env[62522]: DEBUG nova.compute.manager [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] No waiting events found dispatching network-vif-plugged-1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 817.154113] env[62522]: WARNING nova.compute.manager [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Received unexpected event network-vif-plugged-1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f for instance with vm_state building and task_state spawning. 
[ 817.154113] env[62522]: DEBUG nova.compute.manager [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Received event network-changed-ded8b2e3-ee39-454b-97d6-5001bdbb8f72 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 817.154113] env[62522]: DEBUG nova.compute.manager [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Refreshing instance network info cache due to event network-changed-ded8b2e3-ee39-454b-97d6-5001bdbb8f72. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 817.154339] env[62522]: DEBUG oslo_concurrency.lockutils [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] Acquiring lock "refresh_cache-a185273e-cdaf-4967-832b-f75014b7b3f4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 817.154588] env[62522]: DEBUG oslo_concurrency.lockutils [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] Acquired lock "refresh_cache-a185273e-cdaf-4967-832b-f75014b7b3f4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.154868] env[62522]: DEBUG nova.network.neutron [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Refreshing network info cache for port ded8b2e3-ee39-454b-97d6-5001bdbb8f72 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 817.397885] env[62522]: DEBUG nova.compute.utils [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 817.401408] env[62522]: DEBUG nova.compute.manager [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 817.401580] env[62522]: DEBUG nova.network.neutron [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 817.429176] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415464, 'name': CreateVM_Task, 'duration_secs': 0.355782} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.429373] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 817.430643] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 817.430818] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.431154] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 817.432175] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8336da5e-c5da-4e73-a3c5-4e86c4133e74 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.437222] env[62522]: DEBUG oslo_vmware.api [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 817.437222] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d66423-ad77-9f34-9ceb-0d3f2ef4b126" [ 817.437222] env[62522]: _type = "Task" [ 817.437222] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.446193] env[62522]: DEBUG oslo_vmware.api [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d66423-ad77-9f34-9ceb-0d3f2ef4b126, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.467772] env[62522]: DEBUG nova.policy [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fb193a7b00704d0d97429f6efc17ce98', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ec421e0535f04c2ba17759e8342e1897', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 817.538153] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Acquiring lock "refresh_cache-504396d8-077d-4563-91b5-a7a6259eea27" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 817.538473] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Acquired lock "refresh_cache-504396d8-077d-4563-91b5-a7a6259eea27" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.538473] env[62522]: DEBUG nova.network.neutron [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 817.583060] env[62522]: DEBUG nova.network.neutron [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Successfully updated port: 30356a78-c3a1-4db1-8efa-ccd3f3e4afd1 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 817.708440] env[62522]: DEBUG nova.network.neutron [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 817.771852] env[62522]: DEBUG nova.network.neutron [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Successfully created port: 1e118d2e-4933-4fb5-8582-23601144447f {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 817.839204] env[62522]: DEBUG nova.network.neutron [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.852331] env[62522]: DEBUG oslo_concurrency.lockutils [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "interface-19d3d54c-5ba1-420f-b012-a08add8546c9-6f83c77d-45cc-446e-8a38-eb8a94e38f59" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.852610] env[62522]: DEBUG oslo_concurrency.lockutils [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "interface-19d3d54c-5ba1-420f-b012-a08add8546c9-6f83c77d-45cc-446e-8a38-eb8a94e38f59" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.903216] env[62522]: DEBUG nova.compute.manager [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 817.958108] env[62522]: DEBUG oslo_vmware.api [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d66423-ad77-9f34-9ceb-0d3f2ef4b126, 'name': SearchDatastore_Task, 'duration_secs': 0.009981} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.958108] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 817.958108] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 817.958521] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 817.959333] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.959333] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 817.959333] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3119a134-b5c8-4794-a89f-ec080c800720 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.972352] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 817.972352] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 817.972352] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-587b0084-00df-4d76-ab82-75c38a70e430 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.977921] env[62522]: DEBUG oslo_vmware.api [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 817.977921] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52696bb1-b3ae-b2a3-9a6c-1d16f6feb749" [ 817.977921] env[62522]: _type = "Task" [ 817.977921] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.992112] env[62522]: DEBUG oslo_vmware.api [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52696bb1-b3ae-b2a3-9a6c-1d16f6feb749, 'name': SearchDatastore_Task, 'duration_secs': 0.00826} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.992866] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b13a5f04-0c59-4938-99a7-b20cb4143455 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.002737] env[62522]: DEBUG oslo_vmware.api [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 818.002737] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e1bb09-694f-bc53-a295-8f16d87019a4" [ 818.002737] env[62522]: _type = "Task" [ 818.002737] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.007216] env[62522]: DEBUG oslo_vmware.api [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e1bb09-694f-bc53-a295-8f16d87019a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.084785] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Acquiring lock "refresh_cache-a185273e-cdaf-4967-832b-f75014b7b3f4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.088734] env[62522]: DEBUG nova.network.neutron [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 818.273180] env[62522]: DEBUG nova.network.neutron [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Updating instance_info_cache with network_info: [{"id": "119d9006-8624-413c-94e2-a9ed9cbba8cb", "address": "fa:16:3e:7b:be:af", "network": {"id": "df5045c5-b236-4ab1-9176-c2718a81a0c6", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1851948476-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "261b19c87c79499abc8747804f1e04c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d986680e-ad16-45b1-bf6d-cd2fe661679f", "external-id": "nsx-vlan-transportzone-397", "segmentation_id": 397, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap119d9006-86", "ovs_interfaceid": "119d9006-8624-413c-94e2-a9ed9cbba8cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.342724] env[62522]: DEBUG oslo_concurrency.lockutils [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] Releasing lock "refresh_cache-a185273e-cdaf-4967-832b-f75014b7b3f4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 818.342973] env[62522]: DEBUG nova.compute.manager [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Received event network-changed-1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 818.343214] env[62522]: DEBUG nova.compute.manager [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Refreshing instance network info cache due to event network-changed-1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 818.343376] env[62522]: DEBUG oslo_concurrency.lockutils [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] Acquiring lock "refresh_cache-3c4c395c-0625-4569-990d-e2d4ad162c14" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.343524] env[62522]: DEBUG oslo_concurrency.lockutils [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] Acquired lock "refresh_cache-3c4c395c-0625-4569-990d-e2d4ad162c14" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.343681] env[62522]: DEBUG nova.network.neutron [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Refreshing network info cache for port 1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 818.344956] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Acquired lock "refresh_cache-a185273e-cdaf-4967-832b-f75014b7b3f4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.345128] env[62522]: DEBUG nova.network.neutron [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 818.355754] env[62522]: DEBUG oslo_concurrency.lockutils [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.355929] env[62522]: DEBUG oslo_concurrency.lockutils [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.359525] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1d821eb-244e-4199-97ca-bd5b41ef6cf0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.380998] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d969e55-9710-4d72-8237-833c9437d05b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.406451] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Reconfiguring VM to detach interface {{(pid=62522) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 818.412382] env[62522]: DEBUG oslo_vmware.service [-] 
Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee41d292-2e9c-491e-8694-10d345274830 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.430513] env[62522]: DEBUG oslo_vmware.api [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 818.430513] env[62522]: value = "task-2415465" [ 818.430513] env[62522]: _type = "Task" [ 818.430513] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.440162] env[62522]: DEBUG oslo_vmware.api [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415465, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.481684] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cfd6ebe-6693-4bf3-b1c5-3378bc75fc92 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.489135] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4bc0c67-aa7e-4869-b7b2-05d95dcf6ca9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.522908] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7bd1bd8-c76f-43cb-93b9-13c687cbe05e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.530914] env[62522]: DEBUG oslo_vmware.api [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e1bb09-694f-bc53-a295-8f16d87019a4, 'name': SearchDatastore_Task, 'duration_secs': 0.009125} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.532991] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 818.533277] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 3c4c395c-0625-4569-990d-e2d4ad162c14/3c4c395c-0625-4569-990d-e2d4ad162c14.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 818.533542] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed3415a2-f13a-4b6d-9558-0407198a2c0f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.536237] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fea17ee4-3555-48a6-ae29-cc5a9c71d72e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.549841] env[62522]: DEBUG nova.compute.provider_tree [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 818.552154] env[62522]: DEBUG oslo_vmware.api [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 818.552154] env[62522]: value = "task-2415466" [ 818.552154] env[62522]: _type = "Task" [ 818.552154] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.559564] env[62522]: DEBUG oslo_vmware.api [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2415466, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.595890] env[62522]: DEBUG nova.compute.manager [req-fed5792b-71b8-4752-85f7-e75885b95dfd req-004e5bc2-0362-4990-b827-fb6aa5da5759 service nova] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Received event network-vif-plugged-119d9006-8624-413c-94e2-a9ed9cbba8cb {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 818.596186] env[62522]: DEBUG oslo_concurrency.lockutils [req-fed5792b-71b8-4752-85f7-e75885b95dfd req-004e5bc2-0362-4990-b827-fb6aa5da5759 service nova] Acquiring lock "504396d8-077d-4563-91b5-a7a6259eea27-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.596265] env[62522]: DEBUG oslo_concurrency.lockutils [req-fed5792b-71b8-4752-85f7-e75885b95dfd req-004e5bc2-0362-4990-b827-fb6aa5da5759 service nova] Lock "504396d8-077d-4563-91b5-a7a6259eea27-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.596437] env[62522]: DEBUG oslo_concurrency.lockutils [req-fed5792b-71b8-4752-85f7-e75885b95dfd req-004e5bc2-0362-4990-b827-fb6aa5da5759 service nova] Lock "504396d8-077d-4563-91b5-a7a6259eea27-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.597036] env[62522]: DEBUG nova.compute.manager [req-fed5792b-71b8-4752-85f7-e75885b95dfd req-004e5bc2-0362-4990-b827-fb6aa5da5759 service nova] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] No waiting events found dispatching network-vif-plugged-119d9006-8624-413c-94e2-a9ed9cbba8cb {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 818.597036] env[62522]: WARNING nova.compute.manager [req-fed5792b-71b8-4752-85f7-e75885b95dfd req-004e5bc2-0362-4990-b827-fb6aa5da5759 service nova] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Received unexpected event network-vif-plugged-119d9006-8624-413c-94e2-a9ed9cbba8cb for instance with vm_state building and task_state spawning. [ 818.597036] env[62522]: DEBUG nova.compute.manager [req-fed5792b-71b8-4752-85f7-e75885b95dfd req-004e5bc2-0362-4990-b827-fb6aa5da5759 service nova] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Received event network-changed-119d9006-8624-413c-94e2-a9ed9cbba8cb {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 818.597281] env[62522]: DEBUG nova.compute.manager [req-fed5792b-71b8-4752-85f7-e75885b95dfd req-004e5bc2-0362-4990-b827-fb6aa5da5759 service nova] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Refreshing instance network info cache due to event network-changed-119d9006-8624-413c-94e2-a9ed9cbba8cb. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 818.597281] env[62522]: DEBUG oslo_concurrency.lockutils [req-fed5792b-71b8-4752-85f7-e75885b95dfd req-004e5bc2-0362-4990-b827-fb6aa5da5759 service nova] Acquiring lock "refresh_cache-504396d8-077d-4563-91b5-a7a6259eea27" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.777476] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Releasing lock "refresh_cache-504396d8-077d-4563-91b5-a7a6259eea27" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 818.777856] env[62522]: DEBUG nova.compute.manager [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Instance network_info: |[{"id": "119d9006-8624-413c-94e2-a9ed9cbba8cb", "address": "fa:16:3e:7b:be:af", "network": {"id": "df5045c5-b236-4ab1-9176-c2718a81a0c6", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1851948476-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "261b19c87c79499abc8747804f1e04c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d986680e-ad16-45b1-bf6d-cd2fe661679f", "external-id": "nsx-vlan-transportzone-397", "segmentation_id": 397, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap119d9006-86", "ovs_interfaceid": "119d9006-8624-413c-94e2-a9ed9cbba8cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 818.778249] env[62522]: DEBUG oslo_concurrency.lockutils [req-fed5792b-71b8-4752-85f7-e75885b95dfd req-004e5bc2-0362-4990-b827-fb6aa5da5759 service nova] Acquired lock "refresh_cache-504396d8-077d-4563-91b5-a7a6259eea27" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.778532] env[62522]: DEBUG nova.network.neutron [req-fed5792b-71b8-4752-85f7-e75885b95dfd req-004e5bc2-0362-4990-b827-fb6aa5da5759 service nova] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Refreshing network info cache for port 119d9006-8624-413c-94e2-a9ed9cbba8cb {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 818.780290] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:be:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd986680e-ad16-45b1-bf6d-cd2fe661679f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'119d9006-8624-413c-94e2-a9ed9cbba8cb', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 818.789218] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Creating folder: Project (261b19c87c79499abc8747804f1e04c9). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 818.790682] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2287a27f-9502-4c56-a327-393d5910767a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.802844] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Created folder: Project (261b19c87c79499abc8747804f1e04c9) in parent group-v489562. [ 818.803120] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Creating folder: Instances. Parent ref: group-v489698. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 818.803393] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6c43b11d-2158-4567-999e-710ce48c6a02 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.813880] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Created folder: Instances in parent group-v489698. [ 818.814677] env[62522]: DEBUG oslo.service.loopingcall [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 818.815064] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 818.815470] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-53bd14cd-c6fe-43e5-95ba-25d49fc33f8f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.837376] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 818.837376] env[62522]: value = "task-2415469" [ 818.837376] env[62522]: _type = "Task" [ 818.837376] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.851090] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415469, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.927688] env[62522]: DEBUG nova.compute.manager [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 818.932268] env[62522]: DEBUG nova.network.neutron [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 818.949589] env[62522]: DEBUG oslo_vmware.api [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415465, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.967500] env[62522]: DEBUG nova.virt.hardware [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 818.968057] env[62522]: DEBUG nova.virt.hardware [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 818.968410] env[62522]: DEBUG nova.virt.hardware [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 818.968936] env[62522]: DEBUG nova.virt.hardware [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 818.969656] env[62522]: DEBUG nova.virt.hardware [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 818.972198] env[62522]: DEBUG 
nova.virt.hardware [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 818.972198] env[62522]: DEBUG nova.virt.hardware [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 818.972198] env[62522]: DEBUG nova.virt.hardware [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 818.972198] env[62522]: DEBUG nova.virt.hardware [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 818.972198] env[62522]: DEBUG nova.virt.hardware [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 818.972198] env[62522]: DEBUG nova.virt.hardware [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 818.972839] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f82b78-897b-4bb6-87ed-2c5561eb74c5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.985565] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d25af1b-6ce6-4eda-9e98-b16a977e8498 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.053736] env[62522]: DEBUG nova.scheduler.client.report [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 819.067492] env[62522]: DEBUG oslo_vmware.api [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 
tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2415466, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468186} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.070055] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 3c4c395c-0625-4569-990d-e2d4ad162c14/3c4c395c-0625-4569-990d-e2d4ad162c14.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 819.070900] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 819.070900] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d6cf8386-e3d2-4ee7-8755-84868c60312e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.077384] env[62522]: DEBUG oslo_vmware.api [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 819.077384] env[62522]: value = "task-2415470" [ 819.077384] env[62522]: _type = "Task" [ 819.077384] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.086421] env[62522]: DEBUG oslo_vmware.api [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2415470, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.351373] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415469, 'name': CreateVM_Task, 'duration_secs': 0.401879} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.352668] env[62522]: DEBUG nova.network.neutron [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Successfully updated port: 1e118d2e-4933-4fb5-8582-23601144447f {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 819.354639] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 819.358020] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.358020] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.358020] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 819.358020] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64ebfc93-4475-4479-942b-69e76dff51d5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.363749] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Waiting for the task: (returnval){ [ 819.363749] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524827c5-db14-54ec-cf7e-5dab7c1f846d" [ 819.363749] env[62522]: _type = "Task" [ 819.363749] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.374429] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524827c5-db14-54ec-cf7e-5dab7c1f846d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.444900] env[62522]: DEBUG oslo_vmware.api [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415465, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.517980] env[62522]: DEBUG nova.network.neutron [req-fed5792b-71b8-4752-85f7-e75885b95dfd req-004e5bc2-0362-4990-b827-fb6aa5da5759 service nova] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Updated VIF entry in instance network info cache for port 119d9006-8624-413c-94e2-a9ed9cbba8cb. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 819.518548] env[62522]: DEBUG nova.network.neutron [req-fed5792b-71b8-4752-85f7-e75885b95dfd req-004e5bc2-0362-4990-b827-fb6aa5da5759 service nova] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Updating instance_info_cache with network_info: [{"id": "119d9006-8624-413c-94e2-a9ed9cbba8cb", "address": "fa:16:3e:7b:be:af", "network": {"id": "df5045c5-b236-4ab1-9176-c2718a81a0c6", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1851948476-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "261b19c87c79499abc8747804f1e04c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d986680e-ad16-45b1-bf6d-cd2fe661679f", "external-id": "nsx-vlan-transportzone-397", "segmentation_id": 397, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap119d9006-86", "ovs_interfaceid": "119d9006-8624-413c-94e2-a9ed9cbba8cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.563080] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.671s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.566062] env[62522]: DEBUG nova.compute.manager [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 819.566955] env[62522]: DEBUG oslo_concurrency.lockutils [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 39.851s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.567320] env[62522]: DEBUG nova.objects.instance [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62522) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 819.596537] env[62522]: DEBUG oslo_vmware.api [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2415470, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071516} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.597747] env[62522]: DEBUG nova.network.neutron [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Updated VIF entry in instance network info cache for port 1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 819.598489] env[62522]: DEBUG nova.network.neutron [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Updating instance_info_cache with network_info: [{"id": "1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f", "address": "fa:16:3e:41:5d:d8", "network": {"id": "c3450427-ea7e-4a07-8399-53265d390e06", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1613138323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "686854cd52ce4809a4d315275260da54", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c42bb08a-77b4-4bba-8166-702cbb1b5f1e", "external-id": "nsx-vlan-transportzone-137", "segmentation_id": 137, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b7d6d1b-0d", "ovs_interfaceid": "1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.602026] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Extended root virtual disk {{(pid=62522) _extend_virtual_disk 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 819.602026] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d789d221-6559-46d8-b518-7c9debe8cc10 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.608278] env[62522]: DEBUG nova.compute.manager [req-76945c4d-75ba-4202-a904-8dbca123d1a4 req-565bbcf2-6b5b-4cbd-9115-292250b44718 service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Received event network-vif-plugged-30356a78-c3a1-4db1-8efa-ccd3f3e4afd1 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 819.608527] env[62522]: DEBUG oslo_concurrency.lockutils [req-76945c4d-75ba-4202-a904-8dbca123d1a4 req-565bbcf2-6b5b-4cbd-9115-292250b44718 service nova] Acquiring lock "a185273e-cdaf-4967-832b-f75014b7b3f4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 819.609300] env[62522]: DEBUG oslo_concurrency.lockutils [req-76945c4d-75ba-4202-a904-8dbca123d1a4 req-565bbcf2-6b5b-4cbd-9115-292250b44718 service nova] Lock "a185273e-cdaf-4967-832b-f75014b7b3f4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.609522] env[62522]: DEBUG oslo_concurrency.lockutils [req-76945c4d-75ba-4202-a904-8dbca123d1a4 req-565bbcf2-6b5b-4cbd-9115-292250b44718 service nova] Lock "a185273e-cdaf-4967-832b-f75014b7b3f4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.609701] env[62522]: DEBUG nova.compute.manager [req-76945c4d-75ba-4202-a904-8dbca123d1a4 req-565bbcf2-6b5b-4cbd-9115-292250b44718 service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] No waiting events found dispatching network-vif-plugged-30356a78-c3a1-4db1-8efa-ccd3f3e4afd1 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 819.609925] env[62522]: WARNING nova.compute.manager [req-76945c4d-75ba-4202-a904-8dbca123d1a4 req-565bbcf2-6b5b-4cbd-9115-292250b44718 service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Received unexpected event network-vif-plugged-30356a78-c3a1-4db1-8efa-ccd3f3e4afd1 for instance with vm_state building and task_state spawning. [ 819.610114] env[62522]: DEBUG nova.compute.manager [req-76945c4d-75ba-4202-a904-8dbca123d1a4 req-565bbcf2-6b5b-4cbd-9115-292250b44718 service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Received event network-changed-30356a78-c3a1-4db1-8efa-ccd3f3e4afd1 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 819.610285] env[62522]: DEBUG nova.compute.manager [req-76945c4d-75ba-4202-a904-8dbca123d1a4 req-565bbcf2-6b5b-4cbd-9115-292250b44718 service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Refreshing instance network info cache due to event network-changed-30356a78-c3a1-4db1-8efa-ccd3f3e4afd1. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 819.610482] env[62522]: DEBUG oslo_concurrency.lockutils [req-76945c4d-75ba-4202-a904-8dbca123d1a4 req-565bbcf2-6b5b-4cbd-9115-292250b44718 service nova] Acquiring lock "refresh_cache-a185273e-cdaf-4967-832b-f75014b7b3f4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.630539] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Reconfiguring VM instance instance-0000002d to attach disk [datastore2] 3c4c395c-0625-4569-990d-e2d4ad162c14/3c4c395c-0625-4569-990d-e2d4ad162c14.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 819.633474] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fbb517fc-2ced-4b7c-8658-8e05cebf33c3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.656158] env[62522]: DEBUG oslo_vmware.api [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 819.656158] env[62522]: value = "task-2415471" [ 819.656158] env[62522]: _type = "Task" [ 819.656158] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.665573] env[62522]: DEBUG oslo_vmware.api [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2415471, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.856508] env[62522]: DEBUG oslo_concurrency.lockutils [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquiring lock "refresh_cache-76cb551e-e605-4c80-a6ef-e36681fc0bc2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.856660] env[62522]: DEBUG oslo_concurrency.lockutils [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquired lock "refresh_cache-76cb551e-e605-4c80-a6ef-e36681fc0bc2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.856920] env[62522]: DEBUG nova.network.neutron [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 819.874860] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524827c5-db14-54ec-cf7e-5dab7c1f846d, 'name': SearchDatastore_Task, 'duration_secs': 0.010379} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.875174] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.875455] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 819.875622] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.875804] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.875939] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 819.877250] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a9d0978b-3538-4f80-861c-c0fd33c4b67d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.885826] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 819.886010] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 819.886701] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb7fb18e-0f77-4b13-aaae-4fe9a12a8d85 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.891797] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Waiting for the task: (returnval){ [ 819.891797] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c002a1-3161-6a3b-1852-aef85c6d6d96" [ 819.891797] env[62522]: _type = "Task" [ 819.891797] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.899932] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c002a1-3161-6a3b-1852-aef85c6d6d96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.944554] env[62522]: DEBUG oslo_vmware.api [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415465, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.021665] env[62522]: DEBUG oslo_concurrency.lockutils [req-fed5792b-71b8-4752-85f7-e75885b95dfd req-004e5bc2-0362-4990-b827-fb6aa5da5759 service nova] Releasing lock "refresh_cache-504396d8-077d-4563-91b5-a7a6259eea27" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.073562] env[62522]: DEBUG nova.compute.utils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 820.075015] env[62522]: DEBUG nova.compute.manager [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 820.075281] env[62522]: DEBUG nova.network.neutron [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 820.084006] env[62522]: DEBUG nova.network.neutron [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Updating instance_info_cache with network_info: [{"id": "3e725d96-bba9-4651-8fc1-70f66a94b0d1", "address": "fa:16:3e:a2:a8:c7", "network": {"id": "d661d493-fae5-4b41-95f1-78fcf7ded492", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1318459925", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.145", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ca7e42d226a4ef6b48b882356da8950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e725d96-bb", "ovs_interfaceid": "3e725d96-bba9-4651-8fc1-70f66a94b0d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ded8b2e3-ee39-454b-97d6-5001bdbb8f72", "address": "fa:16:3e:a6:42:a0", "network": {"id": "2402eaa3-cb14-4b01-a2ba-0d026901bac0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1525415661", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.202", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "3ca7e42d226a4ef6b48b882356da8950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapded8b2e3-ee", "ovs_interfaceid": "ded8b2e3-ee39-454b-97d6-5001bdbb8f72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "30356a78-c3a1-4db1-8efa-ccd3f3e4afd1", "address": "fa:16:3e:8d:7a:82", "network": {"id": "d661d493-fae5-4b41-95f1-78fcf7ded492", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1318459925", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.131", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, 
"meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ca7e42d226a4ef6b48b882356da8950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30356a78-c3", "ovs_interfaceid": "30356a78-c3a1-4db1-8efa-ccd3f3e4afd1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.102092] env[62522]: DEBUG oslo_concurrency.lockutils [req-177b65b8-d61e-418a-9d20-4bb765a4abfb req-138d3ddb-43cf-4c29-9386-4cdacd98745e service nova] Releasing lock "refresh_cache-3c4c395c-0625-4569-990d-e2d4ad162c14" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.161642] env[62522]: DEBUG nova.policy [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e3ce17ca2f0d457c8768549d66b1400a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e47f8c538134439d8405e2825ad0af22', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 820.168682] env[62522]: DEBUG oslo_vmware.api [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2415471, 'name': ReconfigVM_Task, 'duration_secs': 0.308657} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.168947] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Reconfigured VM instance instance-0000002d to attach disk [datastore2] 3c4c395c-0625-4569-990d-e2d4ad162c14/3c4c395c-0625-4569-990d-e2d4ad162c14.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 820.169597] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ec1f4483-4313-4127-8310-59d587a1a752 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.179587] env[62522]: DEBUG oslo_vmware.api [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 820.179587] env[62522]: value = "task-2415472" [ 820.179587] env[62522]: _type = "Task" [ 820.179587] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.192656] env[62522]: DEBUG oslo_vmware.api [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2415472, 'name': Rename_Task} progress is 10%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.405025] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c002a1-3161-6a3b-1852-aef85c6d6d96, 'name': SearchDatastore_Task, 'duration_secs': 0.00918} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.405025] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf19e84a-cbb3-4760-85bf-bae02a95083b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.408193] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Waiting for the task: (returnval){ [ 820.408193] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52985b59-5911-bbc1-0dc6-80c8a22fd178" [ 820.408193] env[62522]: _type = "Task" [ 820.408193] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.416497] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52985b59-5911-bbc1-0dc6-80c8a22fd178, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.417689] env[62522]: DEBUG nova.network.neutron [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 820.447137] env[62522]: DEBUG oslo_vmware.api [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415465, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.584024] env[62522]: DEBUG nova.compute.manager [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 820.585997] env[62522]: DEBUG oslo_concurrency.lockutils [None req-11da0dc7-3efc-4574-8bfd-d5bf75c0d77e tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.587268] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.170s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.589032] env[62522]: INFO nova.compute.claims [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 820.591783] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Releasing lock "refresh_cache-a185273e-cdaf-4967-832b-f75014b7b3f4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.592290] env[62522]: DEBUG nova.compute.manager [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Instance network_info: |[{"id": "3e725d96-bba9-4651-8fc1-70f66a94b0d1", "address": "fa:16:3e:a2:a8:c7", "network": {"id": "d661d493-fae5-4b41-95f1-78fcf7ded492", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1318459925", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.145", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ca7e42d226a4ef6b48b882356da8950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e725d96-bb", "ovs_interfaceid": "3e725d96-bba9-4651-8fc1-70f66a94b0d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ded8b2e3-ee39-454b-97d6-5001bdbb8f72", "address": "fa:16:3e:a6:42:a0", "network": {"id": "2402eaa3-cb14-4b01-a2ba-0d026901bac0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1525415661", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.202", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": 
true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "3ca7e42d226a4ef6b48b882356da8950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapded8b2e3-ee", "ovs_interfaceid": "ded8b2e3-ee39-454b-97d6-5001bdbb8f72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "30356a78-c3a1-4db1-8efa-ccd3f3e4afd1", "address": "fa:16:3e:8d:7a:82", "network": {"id": "d661d493-fae5-4b41-95f1-78fcf7ded492", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1318459925", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.131", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ca7e42d226a4ef6b48b882356da8950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30356a78-c3", "ovs_interfaceid": "30356a78-c3a1-4db1-8efa-ccd3f3e4afd1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 820.593248] env[62522]: DEBUG oslo_concurrency.lockutils [req-76945c4d-75ba-4202-a904-8dbca123d1a4 req-565bbcf2-6b5b-4cbd-9115-292250b44718 service nova] Acquired lock "refresh_cache-a185273e-cdaf-4967-832b-f75014b7b3f4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.593623] env[62522]: DEBUG nova.network.neutron [req-76945c4d-75ba-4202-a904-8dbca123d1a4 req-565bbcf2-6b5b-4cbd-9115-292250b44718 service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Refreshing network info cache for port 30356a78-c3a1-4db1-8efa-ccd3f3e4afd1 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 820.594741] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:a8:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1fb81f98-6f5a-47ab-a512-27277591d064', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3e725d96-bba9-4651-8fc1-70f66a94b0d1', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:42:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f17856cf-7248-414b-bde6-8c90cfb4c593', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ded8b2e3-ee39-454b-97d6-5001bdbb8f72', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:7a:82', 'network_ref': 
{'type': 'OpaqueNetwork', 'network-id': '1fb81f98-6f5a-47ab-a512-27277591d064', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '30356a78-c3a1-4db1-8efa-ccd3f3e4afd1', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 820.609238] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Creating folder: Project (3ca7e42d226a4ef6b48b882356da8950). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 820.612939] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cd0317bf-72c4-4464-a9e3-a850dd6e75f1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.626020] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Created folder: Project (3ca7e42d226a4ef6b48b882356da8950) in parent group-v489562. [ 820.626020] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Creating folder: Instances. Parent ref: group-v489701. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 820.626020] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4047df56-e8ec-481d-874d-7822716a8486 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.633882] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Created folder: Instances in parent group-v489701. [ 820.634259] env[62522]: DEBUG oslo.service.loopingcall [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 820.634555] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 820.634863] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e0a68c99-758e-4f49-9acb-7f954f54cf33 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.665775] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 820.665775] env[62522]: value = "task-2415475" [ 820.665775] env[62522]: _type = "Task" [ 820.665775] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.675740] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415475, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.688191] env[62522]: DEBUG oslo_vmware.api [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2415472, 'name': Rename_Task, 'duration_secs': 0.152873} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.688680] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 820.688926] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6904dfeb-e227-4956-8577-eabf91c0232c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.697210] env[62522]: DEBUG oslo_vmware.api [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 820.697210] env[62522]: value = "task-2415476" [ 820.697210] env[62522]: _type = "Task" [ 820.697210] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.704576] env[62522]: DEBUG oslo_vmware.api [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2415476, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.719188] env[62522]: DEBUG nova.network.neutron [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Updating instance_info_cache with network_info: [{"id": "1e118d2e-4933-4fb5-8582-23601144447f", "address": "fa:16:3e:66:01:ad", "network": {"id": "27951c52-e28e-4c94-968c-c1b5ddd6b58b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1545103257-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec421e0535f04c2ba17759e8342e1897", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e118d2e-49", "ovs_interfaceid": "1e118d2e-4933-4fb5-8582-23601144447f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.740271] env[62522]: DEBUG nova.compute.manager [req-93665d5f-4f5c-4ce2-b93e-4d9021fbf465 req-4b9e777e-bddc-47db-85a8-f78787f553bc service nova] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Received event network-vif-plugged-1e118d2e-4933-4fb5-8582-23601144447f {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 820.740465] env[62522]: DEBUG oslo_concurrency.lockutils [req-93665d5f-4f5c-4ce2-b93e-4d9021fbf465 req-4b9e777e-bddc-47db-85a8-f78787f553bc service nova] Acquiring lock "76cb551e-e605-4c80-a6ef-e36681fc0bc2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.741104] env[62522]: DEBUG oslo_concurrency.lockutils [req-93665d5f-4f5c-4ce2-b93e-4d9021fbf465 req-4b9e777e-bddc-47db-85a8-f78787f553bc service nova] Lock "76cb551e-e605-4c80-a6ef-e36681fc0bc2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.741104] env[62522]: DEBUG oslo_concurrency.lockutils [req-93665d5f-4f5c-4ce2-b93e-4d9021fbf465 req-4b9e777e-bddc-47db-85a8-f78787f553bc service nova] Lock "76cb551e-e605-4c80-a6ef-e36681fc0bc2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.741220] env[62522]: DEBUG nova.compute.manager [req-93665d5f-4f5c-4ce2-b93e-4d9021fbf465 req-4b9e777e-bddc-47db-85a8-f78787f553bc service nova] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] No waiting events found dispatching network-vif-plugged-1e118d2e-4933-4fb5-8582-23601144447f {{(pid=62522) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 820.741469] env[62522]: WARNING nova.compute.manager [req-93665d5f-4f5c-4ce2-b93e-4d9021fbf465 req-4b9e777e-bddc-47db-85a8-f78787f553bc service nova] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Received unexpected event network-vif-plugged-1e118d2e-4933-4fb5-8582-23601144447f for instance with vm_state building and task_state spawning. [ 820.741719] env[62522]: DEBUG nova.compute.manager [req-93665d5f-4f5c-4ce2-b93e-4d9021fbf465 req-4b9e777e-bddc-47db-85a8-f78787f553bc service nova] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Received event network-changed-1e118d2e-4933-4fb5-8582-23601144447f {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 820.741951] env[62522]: DEBUG nova.compute.manager [req-93665d5f-4f5c-4ce2-b93e-4d9021fbf465 req-4b9e777e-bddc-47db-85a8-f78787f553bc service nova] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Refreshing instance network info cache due to event network-changed-1e118d2e-4933-4fb5-8582-23601144447f. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 820.742153] env[62522]: DEBUG oslo_concurrency.lockutils [req-93665d5f-4f5c-4ce2-b93e-4d9021fbf465 req-4b9e777e-bddc-47db-85a8-f78787f553bc service nova] Acquiring lock "refresh_cache-76cb551e-e605-4c80-a6ef-e36681fc0bc2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.746926] env[62522]: DEBUG nova.network.neutron [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Successfully created port: 680f73c8-8196-4790-84fe-eb56b69413df {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 820.919986] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52985b59-5911-bbc1-0dc6-80c8a22fd178, 'name': SearchDatastore_Task, 'duration_secs': 0.009241} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.920282] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.920661] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 504396d8-077d-4563-91b5-a7a6259eea27/504396d8-077d-4563-91b5-a7a6259eea27.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 820.920821] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-114d42ed-d58e-47f4-a207-c5cd52ca26ba {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.927119] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Waiting for the task: (returnval){ [ 820.927119] env[62522]: value = "task-2415477" [ 820.927119] env[62522]: _type = "Task" [ 820.927119] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.935815] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415477, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.945698] env[62522]: DEBUG oslo_vmware.api [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415465, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.176822] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415475, 'name': CreateVM_Task, 'duration_secs': 0.43872} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.176995] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 821.177967] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.178148] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.178484] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 821.179088] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aad1844c-ffb5-4902-abfb-4dbac58c3ff5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.185802] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Waiting for the task: (returnval){ [ 821.185802] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5284913e-4abb-ffa1-bf19-cd37782d1c5c" [ 821.185802] env[62522]: _type = "Task" [ 821.185802] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.196442] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5284913e-4abb-ffa1-bf19-cd37782d1c5c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.207784] env[62522]: DEBUG oslo_vmware.api [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2415476, 'name': PowerOnVM_Task} progress is 99%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.224115] env[62522]: DEBUG oslo_concurrency.lockutils [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Releasing lock "refresh_cache-76cb551e-e605-4c80-a6ef-e36681fc0bc2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.224508] env[62522]: DEBUG nova.compute.manager [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Instance network_info: |[{"id": "1e118d2e-4933-4fb5-8582-23601144447f", "address": "fa:16:3e:66:01:ad", "network": {"id": "27951c52-e28e-4c94-968c-c1b5ddd6b58b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1545103257-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec421e0535f04c2ba17759e8342e1897", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e118d2e-49", "ovs_interfaceid": "1e118d2e-4933-4fb5-8582-23601144447f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 821.224916] env[62522]: DEBUG oslo_concurrency.lockutils [req-93665d5f-4f5c-4ce2-b93e-4d9021fbf465 req-4b9e777e-bddc-47db-85a8-f78787f553bc service nova] Acquired lock "refresh_cache-76cb551e-e605-4c80-a6ef-e36681fc0bc2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.225778] env[62522]: DEBUG nova.network.neutron [req-93665d5f-4f5c-4ce2-b93e-4d9021fbf465 req-4b9e777e-bddc-47db-85a8-f78787f553bc service nova] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Refreshing network info cache for port 1e118d2e-4933-4fb5-8582-23601144447f {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 821.226545] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:01:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db1f7867-8524-469c-ab47-d2c9e2751d98', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e118d2e-4933-4fb5-8582-23601144447f', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 821.235763] env[62522]: DEBUG oslo.service.loopingcall [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 
tempest-SecurityGroupsTestJSON-1761875515-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 821.239473] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 821.240491] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-84610243-2202-4dc7-9d07-8fd3b82cc710 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.268253] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 821.268253] env[62522]: value = "task-2415478" [ 821.268253] env[62522]: _type = "Task" [ 821.268253] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.284784] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415478, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.440026] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415477, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.462716} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.444727] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 504396d8-077d-4563-91b5-a7a6259eea27/504396d8-077d-4563-91b5-a7a6259eea27.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 821.445196] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 821.448832] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5b55448b-4004-44cf-9a85-6b853ad842fc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.456621] env[62522]: DEBUG oslo_vmware.api [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415465, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.457971] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Waiting for the task: (returnval){ [ 821.457971] env[62522]: value = "task-2415479" [ 821.457971] env[62522]: _type = "Task" [ 821.457971] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.467104] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415479, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.533341] env[62522]: DEBUG nova.network.neutron [req-93665d5f-4f5c-4ce2-b93e-4d9021fbf465 req-4b9e777e-bddc-47db-85a8-f78787f553bc service nova] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Updated VIF entry in instance network info cache for port 1e118d2e-4933-4fb5-8582-23601144447f. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 821.533715] env[62522]: DEBUG nova.network.neutron [req-93665d5f-4f5c-4ce2-b93e-4d9021fbf465 req-4b9e777e-bddc-47db-85a8-f78787f553bc service nova] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Updating instance_info_cache with network_info: [{"id": "1e118d2e-4933-4fb5-8582-23601144447f", "address": "fa:16:3e:66:01:ad", "network": {"id": "27951c52-e28e-4c94-968c-c1b5ddd6b58b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1545103257-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec421e0535f04c2ba17759e8342e1897", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e118d2e-49", "ovs_interfaceid": "1e118d2e-4933-4fb5-8582-23601144447f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.595889] env[62522]: DEBUG nova.compute.manager [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 821.602998] env[62522]: DEBUG nova.network.neutron [req-76945c4d-75ba-4202-a904-8dbca123d1a4 req-565bbcf2-6b5b-4cbd-9115-292250b44718 service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Updated VIF entry in instance network info cache for port 30356a78-c3a1-4db1-8efa-ccd3f3e4afd1. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 821.603490] env[62522]: DEBUG nova.network.neutron [req-76945c4d-75ba-4202-a904-8dbca123d1a4 req-565bbcf2-6b5b-4cbd-9115-292250b44718 service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Updating instance_info_cache with network_info: [{"id": "3e725d96-bba9-4651-8fc1-70f66a94b0d1", "address": "fa:16:3e:a2:a8:c7", "network": {"id": "d661d493-fae5-4b41-95f1-78fcf7ded492", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1318459925", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.145", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ca7e42d226a4ef6b48b882356da8950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e725d96-bb", "ovs_interfaceid": "3e725d96-bba9-4651-8fc1-70f66a94b0d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ded8b2e3-ee39-454b-97d6-5001bdbb8f72", "address": "fa:16:3e:a6:42:a0", "network": {"id": "2402eaa3-cb14-4b01-a2ba-0d026901bac0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1525415661", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.202", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "3ca7e42d226a4ef6b48b882356da8950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapded8b2e3-ee", "ovs_interfaceid": "ded8b2e3-ee39-454b-97d6-5001bdbb8f72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "30356a78-c3a1-4db1-8efa-ccd3f3e4afd1", "address": "fa:16:3e:8d:7a:82", "network": {"id": "d661d493-fae5-4b41-95f1-78fcf7ded492", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1318459925", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.131", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ca7e42d226a4ef6b48b882356da8950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", 
"segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30356a78-c3", "ovs_interfaceid": "30356a78-c3a1-4db1-8efa-ccd3f3e4afd1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.627710] env[62522]: DEBUG nova.virt.hardware [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 821.627967] env[62522]: DEBUG nova.virt.hardware [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 821.628146] env[62522]: DEBUG nova.virt.hardware [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 821.629679] env[62522]: DEBUG nova.virt.hardware [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 821.629679] env[62522]: DEBUG nova.virt.hardware [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 821.629679] env[62522]: DEBUG nova.virt.hardware [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 821.629679] env[62522]: DEBUG nova.virt.hardware [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 821.629679] env[62522]: DEBUG nova.virt.hardware [None 
req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 821.629679] env[62522]: DEBUG nova.virt.hardware [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 821.629679] env[62522]: DEBUG nova.virt.hardware [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 821.629679] env[62522]: DEBUG nova.virt.hardware [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 821.630541] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-672bcbb8-3bd7-4b95-a746-404c216651dd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.641501] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4607528-105c-401d-a246-af91a72a9d8e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.696899] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5284913e-4abb-ffa1-bf19-cd37782d1c5c, 'name': SearchDatastore_Task, 'duration_secs': 0.061961} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.697222] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.697485] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 821.697739] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.697890] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.698085] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 821.698391] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eb745ed2-a1c9-4484-816a-006012873770 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.709835] env[62522]: DEBUG oslo_vmware.api [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2415476, 'name': PowerOnVM_Task, 'duration_secs': 0.667785} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.710095] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 821.710298] env[62522]: INFO nova.compute.manager [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Took 8.21 seconds to spawn the instance on the hypervisor. 
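
The CopyVirtualDisk_Task, ExtendVirtualDisk_Task and PowerOnVM_Task entries above all follow oslo.vmware's generic submit-then-poll pattern: a vSphere "*_Task" method is invoked through the session, and wait_for_task polls it until completion, producing the "progress is N%" and "completed successfully" DEBUG lines. The sketch below is illustrative only and is not taken from this run; the vCenter host, credentials, retry/poll values and VM reference are placeholders, and constructor keyword names can vary between oslo.vmware releases.

    # Minimal sketch of the oslo.vmware task pattern seen in the log above.
    # All concrete values are placeholders.
    from oslo_vmware import api


    def power_on_vm(vcenter_host, username, password, vm_ref):
        # An authenticated session; failed calls are retried api_retry_count
        # times and pending tasks are polled every task_poll_interval seconds,
        # which is what emits the "progress is N%" DEBUG lines.
        session = api.VMwareAPISession(
            vcenter_host, username, password,
            api_retry_count=10, task_poll_interval=0.5)
        try:
            # invoke_api() calls the vSphere method through the session's vim
            # client; PowerOnVM_Task returns a task reference immediately.
            task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
            # wait_for_task() blocks, polling until the task succeeds or
            # raises, mirroring the wait_for_task/_poll_task entries above.
            return session.wait_for_task(task)
        finally:
            session.logout()
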
[ 821.710510] env[62522]: DEBUG nova.compute.manager [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 821.711278] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04384b9-4c4e-4d15-b96c-67835eaf3276 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.714419] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 821.714592] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 821.715548] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fe75a98-e01f-4433-8bb7-e4232103e2a0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.726289] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Waiting for the task: (returnval){ [ 821.726289] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5222ee13-174f-674c-7337-3062cad6b1e5" [ 821.726289] env[62522]: _type = "Task" [ 821.726289] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.733819] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5222ee13-174f-674c-7337-3062cad6b1e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.779673] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415478, 'name': CreateVM_Task, 'duration_secs': 0.349827} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.782315] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 821.783583] env[62522]: DEBUG oslo_concurrency.lockutils [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.783583] env[62522]: DEBUG oslo_concurrency.lockutils [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.783717] env[62522]: DEBUG oslo_concurrency.lockutils [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 821.784712] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d37c5af3-9dcf-4c3c-94f8-10f3ee04dc0f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.789127] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Waiting for the task: (returnval){ [ 821.789127] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f9a4d0-c357-8363-371b-5a74bc3a2cef" [ 821.789127] env[62522]: _type = "Task" [ 821.789127] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.801205] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f9a4d0-c357-8363-371b-5a74bc3a2cef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.953864] env[62522]: DEBUG oslo_vmware.api [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415465, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.970140] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415479, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067868} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.972627] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 821.973584] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b46362-5f58-4eb2-9977-9b482fc457c5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.995157] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] 504396d8-077d-4563-91b5-a7a6259eea27/504396d8-077d-4563-91b5-a7a6259eea27.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 821.998997] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-39980be9-2fd0-4739-b3ed-be8138d9c78f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.018124] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Waiting for the task: (returnval){ [ 822.018124] env[62522]: value = "task-2415480" [ 822.018124] env[62522]: _type = "Task" [ 822.018124] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.027092] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415480, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.038233] env[62522]: DEBUG oslo_concurrency.lockutils [req-93665d5f-4f5c-4ce2-b93e-4d9021fbf465 req-4b9e777e-bddc-47db-85a8-f78787f553bc service nova] Releasing lock "refresh_cache-76cb551e-e605-4c80-a6ef-e36681fc0bc2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.106108] env[62522]: DEBUG oslo_concurrency.lockutils [req-76945c4d-75ba-4202-a904-8dbca123d1a4 req-565bbcf2-6b5b-4cbd-9115-292250b44718 service nova] Releasing lock "refresh_cache-a185273e-cdaf-4967-832b-f75014b7b3f4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.178548] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d2503f-4798-4f16-a388-b4271267670c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.186550] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89ca83d6-83f9-47d2-897c-280771889f43 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.217295] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eea577b-41b1-4af6-86a3-046c1c589e4e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.229760] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd24f96-b470-423a-a0eb-d7d649b5680a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.238587] env[62522]: INFO nova.compute.manager [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Took 55.54 seconds to build instance. [ 822.253739] env[62522]: DEBUG nova.compute.provider_tree [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 822.261194] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5222ee13-174f-674c-7337-3062cad6b1e5, 'name': SearchDatastore_Task, 'duration_secs': 0.042496} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.263301] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44fbbdcf-5aa0-4d5f-abc7-331bf717af54 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.272026] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Waiting for the task: (returnval){ [ 822.272026] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524d58fd-9d76-0973-4d15-9ae0f7c9c3fc" [ 822.272026] env[62522]: _type = "Task" [ 822.272026] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.284022] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524d58fd-9d76-0973-4d15-9ae0f7c9c3fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.302172] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f9a4d0-c357-8363-371b-5a74bc3a2cef, 'name': SearchDatastore_Task, 'duration_secs': 0.039204} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.302563] env[62522]: DEBUG oslo_concurrency.lockutils [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.302897] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 822.303183] env[62522]: DEBUG oslo_concurrency.lockutils [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.449458] env[62522]: DEBUG oslo_vmware.api [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415465, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.528168] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415480, 'name': ReconfigVM_Task, 'duration_secs': 0.269645} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.528540] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Reconfigured VM instance instance-0000002e to attach disk [datastore2] 504396d8-077d-4563-91b5-a7a6259eea27/504396d8-077d-4563-91b5-a7a6259eea27.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 822.529257] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cc91fb9e-5bec-4029-a44c-704bf72fb861 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.535652] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Waiting for the task: (returnval){ [ 822.535652] env[62522]: value = "task-2415481" [ 822.535652] env[62522]: _type = "Task" [ 822.535652] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.546147] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415481, 'name': Rename_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.724375] env[62522]: DEBUG nova.compute.manager [req-a5ed1d32-45f4-43c5-8d14-6fa6bddf6bd6 req-ac847f33-89c6-4717-b1b9-c190d6027a7b service nova] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Received event network-vif-plugged-680f73c8-8196-4790-84fe-eb56b69413df {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 822.724819] env[62522]: DEBUG oslo_concurrency.lockutils [req-a5ed1d32-45f4-43c5-8d14-6fa6bddf6bd6 req-ac847f33-89c6-4717-b1b9-c190d6027a7b service nova] Acquiring lock "5ed51dce-2a56-4389-acf8-280bd93ff5f0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.724951] env[62522]: DEBUG oslo_concurrency.lockutils [req-a5ed1d32-45f4-43c5-8d14-6fa6bddf6bd6 req-ac847f33-89c6-4717-b1b9-c190d6027a7b service nova] Lock "5ed51dce-2a56-4389-acf8-280bd93ff5f0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.725211] env[62522]: DEBUG oslo_concurrency.lockutils [req-a5ed1d32-45f4-43c5-8d14-6fa6bddf6bd6 req-ac847f33-89c6-4717-b1b9-c190d6027a7b service nova] Lock "5ed51dce-2a56-4389-acf8-280bd93ff5f0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.727229] env[62522]: DEBUG nova.compute.manager [req-a5ed1d32-45f4-43c5-8d14-6fa6bddf6bd6 req-ac847f33-89c6-4717-b1b9-c190d6027a7b service nova] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] No waiting events found dispatching network-vif-plugged-680f73c8-8196-4790-84fe-eb56b69413df {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 822.727414] env[62522]: WARNING nova.compute.manager [req-a5ed1d32-45f4-43c5-8d14-6fa6bddf6bd6 req-ac847f33-89c6-4717-b1b9-c190d6027a7b service nova] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Received unexpected event network-vif-plugged-680f73c8-8196-4790-84fe-eb56b69413df for instance with vm_state building and task_state spawning. 
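
The "5ed51dce-…-events" acquire/release entries above come from the per-instance event lock taken by pop_instance_event.._pop_event while dispatching the network-vif-plugged external event. A simplified sketch of that oslo.concurrency pattern follows; the waiter bookkeeping inside the lock is a placeholder, not Nova's actual implementation.

    # Simplified sketch of the per-instance event lock pattern in the log
    # above; the data structures are placeholders.
    from oslo_concurrency import lockutils


    def pop_instance_event(instance_uuid, event_name, waiters):
        # The decorator serializes event handling per instance; entering and
        # leaving the decorated call produces the "acquired by"/"released by"
        # DEBUG lines above (waited 0.000s / held 0.000s when uncontended).
        @lockutils.synchronized('%s-events' % instance_uuid)
        def _pop_event():
            # Return the waiter registered for this event, if any; a missing
            # waiter corresponds to the "No waiting events found dispatching
            # ..." message and the subsequent "unexpected event" WARNING.
            return waiters.get(instance_uuid, {}).pop(event_name, None)

        return _pop_event()
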
[ 822.741977] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76f9863d-a5ce-4dcd-84a5-b28150d7c3d9 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "3c4c395c-0625-4569-990d-e2d4ad162c14" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.924s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.764897] env[62522]: DEBUG nova.scheduler.client.report [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 822.780895] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524d58fd-9d76-0973-4d15-9ae0f7c9c3fc, 'name': SearchDatastore_Task, 'duration_secs': 0.021415} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.781049] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.781320] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] a185273e-cdaf-4967-832b-f75014b7b3f4/a185273e-cdaf-4967-832b-f75014b7b3f4.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 822.781767] env[62522]: DEBUG oslo_concurrency.lockutils [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.781861] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 822.782177] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with 
opID=oslo.vmware-f6b4a2fd-c402-4581-8721-c0d23155703a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.784284] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f47894e-a095-47fb-908a-7dedcc426b53 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.792740] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Waiting for the task: (returnval){ [ 822.792740] env[62522]: value = "task-2415482" [ 822.792740] env[62522]: _type = "Task" [ 822.792740] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.798102] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 822.798283] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 822.802237] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5df2df3e-c21e-4228-8934-0a8f36b5b966 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.804924] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415482, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.807704] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Waiting for the task: (returnval){ [ 822.807704] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527b08ed-00c3-89f0-8495-7f57e048b7fb" [ 822.807704] env[62522]: _type = "Task" [ 822.807704] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.816794] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527b08ed-00c3-89f0-8495-7f57e048b7fb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.883659] env[62522]: DEBUG nova.network.neutron [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Successfully updated port: 680f73c8-8196-4790-84fe-eb56b69413df {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 822.890634] env[62522]: DEBUG nova.compute.manager [req-62cfe649-0f8d-4d61-a00f-258a89f8aea4 req-e36074bc-d535-44f4-abb2-9edf04a6915a service nova] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Received event network-changed-1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 822.890634] env[62522]: DEBUG nova.compute.manager [req-62cfe649-0f8d-4d61-a00f-258a89f8aea4 req-e36074bc-d535-44f4-abb2-9edf04a6915a service nova] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Refreshing instance network info cache due to event network-changed-1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 822.890634] env[62522]: DEBUG oslo_concurrency.lockutils [req-62cfe649-0f8d-4d61-a00f-258a89f8aea4 req-e36074bc-d535-44f4-abb2-9edf04a6915a service nova] Acquiring lock "refresh_cache-3c4c395c-0625-4569-990d-e2d4ad162c14" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.890768] env[62522]: DEBUG oslo_concurrency.lockutils [req-62cfe649-0f8d-4d61-a00f-258a89f8aea4 req-e36074bc-d535-44f4-abb2-9edf04a6915a service nova] Acquired lock "refresh_cache-3c4c395c-0625-4569-990d-e2d4ad162c14" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.890880] env[62522]: DEBUG nova.network.neutron [req-62cfe649-0f8d-4d61-a00f-258a89f8aea4 req-e36074bc-d535-44f4-abb2-9edf04a6915a service nova] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Refreshing network info cache for port 1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 822.950558] env[62522]: DEBUG oslo_vmware.api [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415465, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.047863] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415481, 'name': Rename_Task, 'duration_secs': 0.135772} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.048254] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 823.048584] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a626cf7c-0526-4883-9be7-8be50e9c58a3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.056320] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Waiting for the task: (returnval){ [ 823.056320] env[62522]: value = "task-2415483" [ 823.056320] env[62522]: _type = "Task" [ 823.056320] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.066234] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415483, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.247014] env[62522]: DEBUG nova.compute.manager [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 823.271583] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.684s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.272257] env[62522]: DEBUG nova.compute.manager [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 823.275187] env[62522]: DEBUG oslo_concurrency.lockutils [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.974s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.275427] env[62522]: DEBUG oslo_concurrency.lockutils [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.277856] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.363s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.278110] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.279963] env[62522]: DEBUG oslo_concurrency.lockutils [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.704s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.281617] env[62522]: INFO nova.compute.claims [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 823.303732] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415482, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.315724] env[62522]: INFO nova.scheduler.client.report [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Deleted allocations for instance 63a7f41d-13cc-420a-96d3-a3f102869137 [ 823.317871] env[62522]: INFO nova.scheduler.client.report [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Deleted allocations for instance bf2ccaeb-610a-437b-be94-d3caefbe15c5 [ 823.336039] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527b08ed-00c3-89f0-8495-7f57e048b7fb, 'name': SearchDatastore_Task, 'duration_secs': 0.026441} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.336681] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a5e0de7-2166-4f31-8af2-7ec23fcf6900 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.343608] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Waiting for the task: (returnval){ [ 823.343608] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e26b7b-e22e-f295-261f-95ee8d19818c" [ 823.343608] env[62522]: _type = "Task" [ 823.343608] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.352908] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e26b7b-e22e-f295-261f-95ee8d19818c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.389429] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "refresh_cache-5ed51dce-2a56-4389-acf8-280bd93ff5f0" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 823.389616] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquired lock "refresh_cache-5ed51dce-2a56-4389-acf8-280bd93ff5f0" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.390048] env[62522]: DEBUG nova.network.neutron [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 823.453349] env[62522]: DEBUG oslo_vmware.api [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415465, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.567521] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415483, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.754251] env[62522]: DEBUG nova.network.neutron [req-62cfe649-0f8d-4d61-a00f-258a89f8aea4 req-e36074bc-d535-44f4-abb2-9edf04a6915a service nova] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Updated VIF entry in instance network info cache for port 1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 823.754929] env[62522]: DEBUG nova.network.neutron [req-62cfe649-0f8d-4d61-a00f-258a89f8aea4 req-e36074bc-d535-44f4-abb2-9edf04a6915a service nova] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Updating instance_info_cache with network_info: [{"id": "1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f", "address": "fa:16:3e:41:5d:d8", "network": {"id": "c3450427-ea7e-4a07-8399-53265d390e06", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1613138323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.174", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "686854cd52ce4809a4d315275260da54", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c42bb08a-77b4-4bba-8166-702cbb1b5f1e", "external-id": "nsx-vlan-transportzone-137", "segmentation_id": 137, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b7d6d1b-0d", "ovs_interfaceid": "1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.779923] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.790244] env[62522]: DEBUG nova.compute.utils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 823.792652] env[62522]: DEBUG nova.compute.manager [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 823.792994] env[62522]: DEBUG nova.network.neutron [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 823.809859] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415482, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.839521} completed successfully. 
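The instance_info_cache entry above is the JSON-serializable network_info model Nova caches per instance; reading the fixed and floating addresses back out of it is plain nested traversal. A short sketch against a trimmed copy of the entry logged above (only the fields used here are kept; the real entry carries many more keys):

import json

network_info = json.loads("""
[{"id": "1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f",
  "address": "fa:16:3e:41:5d:d8",
  "network": {"subnets": [{"cidr": "192.168.128.0/28",
    "ips": [{"address": "192.168.128.14", "type": "fixed",
             "floating_ips": [{"address": "10.180.180.174", "type": "floating"}]}]}]}}]
""")

# Walk VIF -> subnet -> fixed IP, collecting any attached floating IPs.
for vif in network_info:
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            floating = [f["address"] for f in ip.get("floating_ips", [])]
            print(vif["id"], ip["address"], floating)
# -> 1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f 192.168.128.14 ['10.180.180.174']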
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.809859] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] a185273e-cdaf-4967-832b-f75014b7b3f4/a185273e-cdaf-4967-832b-f75014b7b3f4.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 823.809859] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 823.809859] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2d7ea57b-dafe-405a-9803-d3bde52cf619 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.816024] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Waiting for the task: (returnval){ [ 823.816024] env[62522]: value = "task-2415484" [ 823.816024] env[62522]: _type = "Task" [ 823.816024] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.824820] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415484, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.839944] env[62522]: DEBUG oslo_concurrency.lockutils [None req-48137217-f63e-4645-b0f9-325467ae03f2 tempest-VolumesAssistedSnapshotsTest-1152024380 tempest-VolumesAssistedSnapshotsTest-1152024380-project-member] Lock "bf2ccaeb-610a-437b-be94-d3caefbe15c5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.984s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.841229] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df81802f-ee8f-4abb-8903-0aaf6dd46505 tempest-ServerShowV254Test-490011704 tempest-ServerShowV254Test-490011704-project-member] Lock "63a7f41d-13cc-420a-96d3-a3f102869137" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.781s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.855016] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e26b7b-e22e-f295-261f-95ee8d19818c, 'name': SearchDatastore_Task, 'duration_secs': 0.059549} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.855016] env[62522]: DEBUG oslo_concurrency.lockutils [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 823.855016] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 76cb551e-e605-4c80-a6ef-e36681fc0bc2/76cb551e-e605-4c80-a6ef-e36681fc0bc2.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 823.855311] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2efc47b1-17ff-4779-ac2e-2bca0af44614 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.864168] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Waiting for the task: (returnval){ [ 823.864168] env[62522]: value = "task-2415485" [ 823.864168] env[62522]: _type = "Task" [ 823.864168] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.880557] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415485, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.882946] env[62522]: DEBUG nova.policy [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e3ce17ca2f0d457c8768549d66b1400a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e47f8c538134439d8405e2825ad0af22', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 823.954347] env[62522]: DEBUG oslo_vmware.api [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415465, 'name': ReconfigVM_Task} progress is 18%. 
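The recurring "Waiting for the task" / "progress is N%" / "completed successfully" lines come from a poll-until-terminal loop around each vCenter task. A library-free sketch of that shape; fetch_task_info, the state names, and the poll interval are stand-ins for illustration, not the oslo.vmware API:

import time

def wait_for_task(fetch_task_info, task_id, poll_interval=0.5):
    """Poll a long-running task until it reaches a terminal state,
    mirroring the 'progress is N%' lines in the log above.

    fetch_task_info is a stand-in callable returning a dict such as
    {'state': 'running'|'success'|'error', 'progress': int, 'error': str}.
    """
    while True:
        info = fetch_task_info(task_id)
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)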
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.973064] env[62522]: DEBUG nova.network.neutron [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 824.069184] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415483, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.260154] env[62522]: DEBUG oslo_concurrency.lockutils [req-62cfe649-0f8d-4d61-a00f-258a89f8aea4 req-e36074bc-d535-44f4-abb2-9edf04a6915a service nova] Releasing lock "refresh_cache-3c4c395c-0625-4569-990d-e2d4ad162c14" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 824.300466] env[62522]: DEBUG nova.compute.manager [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 824.325882] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415484, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070164} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.326851] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 824.327849] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e7a835e-95ce-47ee-80f7-fc84e3532618 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.373656] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Reconfiguring VM instance instance-0000002c to attach disk [datastore2] a185273e-cdaf-4967-832b-f75014b7b3f4/a185273e-cdaf-4967-832b-f75014b7b3f4.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 824.377969] env[62522]: DEBUG nova.network.neutron [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Updating instance_info_cache with network_info: [{"id": "680f73c8-8196-4790-84fe-eb56b69413df", "address": "fa:16:3e:17:65:84", "network": {"id": "6cb7cdaa-cbcd-4565-a222-310242ff25b1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-684499993-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e47f8c538134439d8405e2825ad0af22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0746f464-a938-427b-ba02-600449df5070", "external-id": "nsx-vlan-transportzone-881", "segmentation_id": 881, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap680f73c8-81", "ovs_interfaceid": "680f73c8-8196-4790-84fe-eb56b69413df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.379924] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51ae1b37-c503-41ed-b7dc-b2677f3d9ee8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.397223] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Releasing lock "refresh_cache-5ed51dce-2a56-4389-acf8-280bd93ff5f0" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 824.397534] env[62522]: DEBUG nova.compute.manager [None 
req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Instance network_info: |[{"id": "680f73c8-8196-4790-84fe-eb56b69413df", "address": "fa:16:3e:17:65:84", "network": {"id": "6cb7cdaa-cbcd-4565-a222-310242ff25b1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-684499993-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e47f8c538134439d8405e2825ad0af22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0746f464-a938-427b-ba02-600449df5070", "external-id": "nsx-vlan-transportzone-881", "segmentation_id": 881, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap680f73c8-81", "ovs_interfaceid": "680f73c8-8196-4790-84fe-eb56b69413df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 824.401821] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:65:84', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0746f464-a938-427b-ba02-600449df5070', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '680f73c8-8196-4790-84fe-eb56b69413df', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 824.409128] env[62522]: DEBUG oslo.service.loopingcall [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 824.410172] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 824.410736] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-79e3e3b3-0e5c-4cc6-bbb9-908dd617716c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.434246] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Waiting for the task: (returnval){ [ 824.434246] env[62522]: value = "task-2415486" [ 824.434246] env[62522]: _type = "Task" [ 824.434246] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.434529] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415485, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482392} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.435822] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 76cb551e-e605-4c80-a6ef-e36681fc0bc2/76cb551e-e605-4c80-a6ef-e36681fc0bc2.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 824.435822] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 824.439651] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0b2b36cc-52b3-480c-aa58-64cf1adfc834 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.442467] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 824.442467] env[62522]: value = "task-2415487" [ 824.442467] env[62522]: _type = "Task" [ 824.442467] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.455729] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415486, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.456066] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Waiting for the task: (returnval){ [ 824.456066] env[62522]: value = "task-2415488" [ 824.456066] env[62522]: _type = "Task" [ 824.456066] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.466382] env[62522]: DEBUG oslo_vmware.api [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415465, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.466382] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415487, 'name': CreateVM_Task} progress is 10%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.476236] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415488, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.567438] env[62522]: DEBUG oslo_vmware.api [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415483, 'name': PowerOnVM_Task, 'duration_secs': 1.368474} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.570298] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 824.570551] env[62522]: INFO nova.compute.manager [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Took 8.40 seconds to spawn the instance on the hypervisor. [ 824.573016] env[62522]: DEBUG nova.compute.manager [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 824.573016] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-653d613b-5780-484e-8df2-2e42575b12d0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.791771] env[62522]: DEBUG nova.network.neutron [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Successfully created port: 71039daa-ce8b-462d-b9f3-8e07f9ec2666 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 824.946482] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415486, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.963198] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415487, 'name': CreateVM_Task} progress is 25%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.963570] env[62522]: DEBUG oslo_vmware.api [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415465, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.972232] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415488, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.990878] env[62522]: DEBUG nova.compute.manager [req-4cb4e26a-f3a3-4fce-bd9c-151d6b1f524e req-eb55741b-86fd-449d-8407-240cdeeea0e7 service nova] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Received event network-changed-680f73c8-8196-4790-84fe-eb56b69413df {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 824.991091] env[62522]: DEBUG nova.compute.manager [req-4cb4e26a-f3a3-4fce-bd9c-151d6b1f524e req-eb55741b-86fd-449d-8407-240cdeeea0e7 service nova] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Refreshing instance network info cache due to event network-changed-680f73c8-8196-4790-84fe-eb56b69413df. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 824.991312] env[62522]: DEBUG oslo_concurrency.lockutils [req-4cb4e26a-f3a3-4fce-bd9c-151d6b1f524e req-eb55741b-86fd-449d-8407-240cdeeea0e7 service nova] Acquiring lock "refresh_cache-5ed51dce-2a56-4389-acf8-280bd93ff5f0" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 824.991456] env[62522]: DEBUG oslo_concurrency.lockutils [req-4cb4e26a-f3a3-4fce-bd9c-151d6b1f524e req-eb55741b-86fd-449d-8407-240cdeeea0e7 service nova] Acquired lock "refresh_cache-5ed51dce-2a56-4389-acf8-280bd93ff5f0" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.991644] env[62522]: DEBUG nova.network.neutron [req-4cb4e26a-f3a3-4fce-bd9c-151d6b1f524e req-eb55741b-86fd-449d-8407-240cdeeea0e7 service nova] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Refreshing network info cache for port 680f73c8-8196-4790-84fe-eb56b69413df {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 825.022040] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edac2c9a-9003-4b50-bd33-c29f8c352f5f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.028426] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b552713-3981-482e-81f4-39e2adb498b4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.064531] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c520ff33-64c8-4440-919c-866c88f27b88 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.073368] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63fe665d-071b-4d0d-8707-8fd96007b3d3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.095135] env[62522]: DEBUG nova.compute.provider_tree [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Updating 
inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 825.097660] env[62522]: INFO nova.compute.manager [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Took 55.66 seconds to build instance. [ 825.315479] env[62522]: DEBUG nova.compute.manager [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 825.347048] env[62522]: DEBUG nova.virt.hardware [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 825.347329] env[62522]: DEBUG nova.virt.hardware [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 825.347539] env[62522]: DEBUG nova.virt.hardware [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 825.347682] env[62522]: DEBUG nova.virt.hardware [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 825.347828] env[62522]: DEBUG nova.virt.hardware [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 825.348030] env[62522]: DEBUG nova.virt.hardware [None 
req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 825.349069] env[62522]: DEBUG nova.virt.hardware [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 825.349514] env[62522]: DEBUG nova.virt.hardware [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 825.349755] env[62522]: DEBUG nova.virt.hardware [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 825.349977] env[62522]: DEBUG nova.virt.hardware [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 825.350433] env[62522]: DEBUG nova.virt.hardware [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 825.351692] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d2ee664-b92c-4eda-9e3d-2313bb2668d4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.364561] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed794f4c-3220-41d0-86f7-62bcb07407b3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.447476] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415486, 'name': ReconfigVM_Task, 'duration_secs': 1.018881} completed successfully. 
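The "Build topologies for 1 vcpu(s) 1:1:1" / "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" lines above reflect enumerating every (sockets, cores, threads) split of the flavor's vCPU count that stays within the limits. A simplified sketch of that enumeration idea (not Nova's hardware.py, which also weighs flavor and image preferences when sorting the results):

from itertools import product

def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
    # Every (sockets, cores, threads) combination whose product equals
    # the vCPU count and respects the per-dimension maxima.
    topologies = []
    for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                           range(1, min(vcpus, max_cores) + 1),
                           range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            topologies.append((s, c, t))
    return topologies

print(possible_cpu_topologies(1))  # [(1, 1, 1)], as in the log above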
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.454101] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Reconfigured VM instance instance-0000002c to attach disk [datastore2] a185273e-cdaf-4967-832b-f75014b7b3f4/a185273e-cdaf-4967-832b-f75014b7b3f4.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 825.454869] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-afb3166c-db17-4937-b7d5-3b1fb6d6098b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.461490] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415487, 'name': CreateVM_Task} progress is 25%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.465575] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Waiting for the task: (returnval){ [ 825.465575] env[62522]: value = "task-2415489" [ 825.465575] env[62522]: _type = "Task" [ 825.465575] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.466570] env[62522]: DEBUG oslo_vmware.api [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415465, 'name': ReconfigVM_Task, 'duration_secs': 6.789159} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.471236] env[62522]: DEBUG oslo_concurrency.lockutils [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 825.471236] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Reconfigured VM to detach interface {{(pid=62522) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 825.480837] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415488, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.866676} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.484500] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 825.484864] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415489, 'name': Rename_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.485609] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f2d1ff-593b-4b1e-a33d-8917f24a45e0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.510539] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] 76cb551e-e605-4c80-a6ef-e36681fc0bc2/76cb551e-e605-4c80-a6ef-e36681fc0bc2.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 825.510539] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ba1e9d5-911d-4646-8322-d11b1414eb26 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.533218] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Waiting for the task: (returnval){ [ 825.533218] env[62522]: value = "task-2415490" [ 825.533218] env[62522]: _type = "Task" [ 825.533218] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.540256] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415490, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.601182] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76877934-f8ad-4f3e-8d73-0bc78ee13f4c tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Lock "504396d8-077d-4563-91b5-a7a6259eea27" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 108.061s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.621400] env[62522]: ERROR nova.scheduler.client.report [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [req-90abb29a-3c63-43de-9ad7-85a0b9634492] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c7fa38b2-245d-4337-a012-22c1a01c0a72. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-90abb29a-3c63-43de-9ad7-85a0b9634492"}]} [ 825.652047] env[62522]: DEBUG nova.scheduler.client.report [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Refreshing inventories for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 825.665366] env[62522]: DEBUG nova.scheduler.client.report [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Updating ProviderTree inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 825.665860] env[62522]: DEBUG nova.compute.provider_tree [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
825.678569] env[62522]: DEBUG nova.scheduler.client.report [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Refreshing aggregate associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, aggregates: None {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 825.703448] env[62522]: DEBUG nova.scheduler.client.report [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Refreshing trait associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 825.904895] env[62522]: DEBUG nova.network.neutron [req-4cb4e26a-f3a3-4fce-bd9c-151d6b1f524e req-eb55741b-86fd-449d-8407-240cdeeea0e7 service nova] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Updated VIF entry in instance network info cache for port 680f73c8-8196-4790-84fe-eb56b69413df. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 825.905814] env[62522]: DEBUG nova.network.neutron [req-4cb4e26a-f3a3-4fce-bd9c-151d6b1f524e req-eb55741b-86fd-449d-8407-240cdeeea0e7 service nova] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Updating instance_info_cache with network_info: [{"id": "680f73c8-8196-4790-84fe-eb56b69413df", "address": "fa:16:3e:17:65:84", "network": {"id": "6cb7cdaa-cbcd-4565-a222-310242ff25b1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-684499993-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e47f8c538134439d8405e2825ad0af22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0746f464-a938-427b-ba02-600449df5070", "external-id": "nsx-vlan-transportzone-881", "segmentation_id": 881, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap680f73c8-81", "ovs_interfaceid": "680f73c8-8196-4790-84fe-eb56b69413df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.964404] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415487, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.984077] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415489, 'name': Rename_Task, 'duration_secs': 0.264442} completed successfully. 
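The 409 "placement.concurrent_update" error above, followed by refreshing inventories, aggregates, and traits and a later successful update that moves the provider generation from 78 to 79, is the usual generation-conflict retry against the placement service. A rough sketch of that cycle using the public placement HTTP endpoints directly; the endpoint base, token, and retry count are placeholders, and auth/microversion handling is simplified:

import requests

PLACEMENT = "http://placement.example/placement"   # assumed endpoint
HEADERS = {"X-Auth-Token": "..."}                   # assumed auth token

def set_inventory(rp_uuid, inventories, generation, retries=3):
    for _ in range(retries):
        body = {"resource_provider_generation": generation,
                "inventories": inventories}
        resp = requests.put(
            f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories",
            json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # Generation conflict: another writer updated the provider first,
        # so re-read it to pick up the new generation and try again.
        rp = requests.get(f"{PLACEMENT}/resource_providers/{rp_uuid}",
                          headers=HEADERS).json()
        generation = rp["generation"]
    raise RuntimeError("placement generation conflict not resolved")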
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.986692] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 825.987176] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1e9e31f1-301e-4b3a-a054-77fc4616fc12 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.993509] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Waiting for the task: (returnval){ [ 825.993509] env[62522]: value = "task-2415491" [ 825.993509] env[62522]: _type = "Task" [ 825.993509] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.006460] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415491, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.043169] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415490, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.104771] env[62522]: DEBUG nova.compute.manager [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 826.312521] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3335e76c-08e9-4520-87d2-a88e581eb7f0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.319947] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b99887-7a6f-43de-9a88-e6d3b7764e15 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.351204] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bea3cbd-77d9-4483-83a9-02c3f9e2ae8a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.358869] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-558f5b44-5b62-44d6-8ea7-477a07641268 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.374418] env[62522]: DEBUG nova.compute.provider_tree [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 826.408342] env[62522]: DEBUG oslo_concurrency.lockutils [req-4cb4e26a-f3a3-4fce-bd9c-151d6b1f524e req-eb55741b-86fd-449d-8407-240cdeeea0e7 service nova] Releasing lock "refresh_cache-5ed51dce-2a56-4389-acf8-280bd93ff5f0" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.464505] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415487, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.503957] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415491, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.542029] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415490, 'name': ReconfigVM_Task, 'duration_secs': 0.714355} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.543231] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Reconfigured VM instance instance-0000002f to attach disk [datastore2] 76cb551e-e605-4c80-a6ef-e36681fc0bc2/76cb551e-e605-4c80-a6ef-e36681fc0bc2.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 826.544039] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f6942aaa-4b13-4bc9-8d76-4e6762980904 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.549742] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Waiting for the task: (returnval){ [ 826.549742] env[62522]: value = "task-2415492" [ 826.549742] env[62522]: _type = "Task" [ 826.549742] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.558292] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415492, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.580759] env[62522]: DEBUG nova.compute.manager [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 826.581687] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4458db21-62af-445d-89ac-ef74832a2207 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.631056] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.906744] env[62522]: DEBUG oslo_concurrency.lockutils [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "19d3d54c-5ba1-420f-b012-a08add8546c9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.907073] env[62522]: DEBUG oslo_concurrency.lockutils [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "19d3d54c-5ba1-420f-b012-a08add8546c9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s 
{{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.907333] env[62522]: DEBUG oslo_concurrency.lockutils [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "19d3d54c-5ba1-420f-b012-a08add8546c9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.907558] env[62522]: DEBUG oslo_concurrency.lockutils [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "19d3d54c-5ba1-420f-b012-a08add8546c9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.907826] env[62522]: DEBUG oslo_concurrency.lockutils [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "19d3d54c-5ba1-420f-b012-a08add8546c9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.910419] env[62522]: INFO nova.compute.manager [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Terminating instance [ 826.921441] env[62522]: DEBUG nova.scheduler.client.report [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Updated inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with generation 78 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 826.921767] env[62522]: DEBUG nova.compute.provider_tree [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Updating resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 generation from 78 to 79 during operation: update_inventory {{(pid=62522) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 826.922035] env[62522]: DEBUG nova.compute.provider_tree [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 826.958495] env[62522]: DEBUG oslo_concurrency.lockutils [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "refresh_cache-19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.958730] env[62522]: DEBUG oslo_concurrency.lockutils [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "refresh_cache-19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.958812] env[62522]: DEBUG nova.network.neutron [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 826.964802] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415487, 'name': CreateVM_Task, 'duration_secs': 2.159589} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.965357] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 826.966223] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.966459] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.966823] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 826.967126] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5245788-9564-4c87-8ff0-a4b66d20bcdf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.973070] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 
tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 826.973070] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a9d613-dddf-b094-de0e-b88d82b75559" [ 826.973070] env[62522]: _type = "Task" [ 826.973070] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.982601] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a9d613-dddf-b094-de0e-b88d82b75559, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.003384] env[62522]: DEBUG oslo_vmware.api [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415491, 'name': PowerOnVM_Task, 'duration_secs': 0.928318} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.003582] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 827.003798] env[62522]: INFO nova.compute.manager [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Took 17.49 seconds to spawn the instance on the hypervisor. [ 827.004063] env[62522]: DEBUG nova.compute.manager [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 827.004976] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecbac462-6a81-432b-9aa7-f19175cd5c50 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.046128] env[62522]: DEBUG nova.network.neutron [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Successfully updated port: 71039daa-ce8b-462d-b9f3-8e07f9ec2666 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 827.060980] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415492, 'name': Rename_Task, 'duration_secs': 0.277733} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.061614] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 827.062606] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5b831f98-501b-441f-9ade-50c6361c9f69 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.071111] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Waiting for the task: (returnval){ [ 827.071111] env[62522]: value = "task-2415493" [ 827.071111] env[62522]: _type = "Task" [ 827.071111] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.083546] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415493, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.093429] env[62522]: INFO nova.compute.manager [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] instance snapshotting [ 827.096501] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b4b489-a0dd-44c1-8812-611de731289b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.119213] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a17fe2-bedd-4c29-978e-67fc5e2e93f6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.251128] env[62522]: DEBUG nova.compute.manager [req-a28a8a4d-b7c6-40de-aa8c-69e64dd29aae req-de45deda-ccbf-42de-9a85-7b05a8f4c3e5 service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Received event network-vif-deleted-6f83c77d-45cc-446e-8a38-eb8a94e38f59 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 827.251128] env[62522]: INFO nova.compute.manager [req-a28a8a4d-b7c6-40de-aa8c-69e64dd29aae req-de45deda-ccbf-42de-9a85-7b05a8f4c3e5 service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Neutron deleted interface 6f83c77d-45cc-446e-8a38-eb8a94e38f59; detaching it from the instance and deleting it from the info cache [ 827.251128] env[62522]: DEBUG nova.network.neutron [req-a28a8a4d-b7c6-40de-aa8c-69e64dd29aae req-de45deda-ccbf-42de-9a85-7b05a8f4c3e5 service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Updating instance_info_cache with network_info: [{"id": "bb09cad6-a323-4801-8cb8-7e58b646a38e", "address": "fa:16:3e:c2:3c:c7", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb09cad6-a3", "ovs_interfaceid": "bb09cad6-a323-4801-8cb8-7e58b646a38e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.417749] env[62522]: DEBUG nova.compute.manager [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 827.418040] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 827.418932] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e43e26e-a00d-448e-b622-9b64193f54d4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.430742] env[62522]: DEBUG oslo_concurrency.lockutils [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.151s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.431135] env[62522]: DEBUG nova.compute.manager [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 827.434413] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 827.434813] env[62522]: DEBUG oslo_concurrency.lockutils [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.402s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.436189] env[62522]: INFO nova.compute.claims [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 827.441831] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-472cde10-f910-4bd5-85a4-6c2a2d4419e3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.450869] env[62522]: DEBUG oslo_vmware.api [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 827.450869] env[62522]: value = "task-2415494" [ 827.450869] env[62522]: _type = "Task" [ 827.450869] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.463701] env[62522]: DEBUG oslo_vmware.api [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415494, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.485924] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a9d613-dddf-b094-de0e-b88d82b75559, 'name': SearchDatastore_Task, 'duration_secs': 0.014166} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.486987] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.487309] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 827.487606] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.487923] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.488166] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 827.488735] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-804c89e3-5c2f-4c22-ad99-5f3105b49716 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.499820] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 827.500082] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 827.501381] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-610a6038-b009-4328-897d-dccd09992fb6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.508321] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 827.508321] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]523419ad-bf31-7b71-3948-49a25392c634" [ 827.508321] env[62522]: _type = "Task" [ 827.508321] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.527924] env[62522]: INFO nova.compute.manager [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Took 67.44 seconds to build instance. [ 827.530034] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]523419ad-bf31-7b71-3948-49a25392c634, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.549211] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "refresh_cache-74e663b1-b552-4b71-aa74-308e908d79e7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.549364] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquired lock "refresh_cache-74e663b1-b552-4b71-aa74-308e908d79e7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.549557] env[62522]: DEBUG nova.network.neutron [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 827.583425] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415493, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.630651] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Creating Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 827.631060] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2cce992e-1c4f-48a7-85c9-5bca59266645 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.643041] env[62522]: DEBUG oslo_vmware.api [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Waiting for the task: (returnval){ [ 827.643041] env[62522]: value = "task-2415495" [ 827.643041] env[62522]: _type = "Task" [ 827.643041] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.652022] env[62522]: DEBUG oslo_vmware.api [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415495, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.754112] env[62522]: DEBUG oslo_concurrency.lockutils [req-a28a8a4d-b7c6-40de-aa8c-69e64dd29aae req-de45deda-ccbf-42de-9a85-7b05a8f4c3e5 service nova] Acquiring lock "19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.880096] env[62522]: INFO nova.network.neutron [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Port 6f83c77d-45cc-446e-8a38-eb8a94e38f59 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 827.881107] env[62522]: DEBUG nova.network.neutron [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Updating instance_info_cache with network_info: [{"id": "bb09cad6-a323-4801-8cb8-7e58b646a38e", "address": "fa:16:3e:c2:3c:c7", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb09cad6-a3", "ovs_interfaceid": "bb09cad6-a323-4801-8cb8-7e58b646a38e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.945919] env[62522]: DEBUG nova.compute.utils [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 827.948168] env[62522]: DEBUG nova.compute.manager [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 827.948168] env[62522]: DEBUG nova.network.neutron [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 827.967057] env[62522]: DEBUG oslo_vmware.api [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415494, 'name': PowerOffVM_Task, 'duration_secs': 0.196806} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.967548] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 827.969018] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 827.969018] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c179b458-8807-42b9-b3a5-8095863cad65 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.026625] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]523419ad-bf31-7b71-3948-49a25392c634, 'name': SearchDatastore_Task, 'duration_secs': 0.017851} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.027633] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf9cd879-c4bd-4c43-b046-2e49addeb6b8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.032418] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d393943e-8214-4f4e-a5a1-c323d71483e1 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Lock "a185273e-cdaf-4967-832b-f75014b7b3f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 114.870s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.041563] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 828.042021] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 828.042081] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Deleting the datastore file [datastore1] 19d3d54c-5ba1-420f-b012-a08add8546c9 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 828.042371] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 
tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 828.042371] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52daec89-fda9-527c-ae6e-55a3eb0901cd" [ 828.042371] env[62522]: _type = "Task" [ 828.042371] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.042566] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3dfaab47-8694-4830-8afb-a3f2ed5db05e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.053330] env[62522]: DEBUG nova.policy [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd53fb53c60c48b4964a8d84aa979458', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '23589532e76f45e1aa7c1f48a2022a19', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 828.059705] env[62522]: DEBUG oslo_vmware.api [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 828.059705] env[62522]: value = "task-2415497" [ 828.059705] env[62522]: _type = "Task" [ 828.059705] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.059970] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52daec89-fda9-527c-ae6e-55a3eb0901cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.072995] env[62522]: DEBUG oslo_vmware.api [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415497, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.082873] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415493, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.127321] env[62522]: DEBUG nova.network.neutron [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 828.155093] env[62522]: DEBUG oslo_vmware.api [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415495, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.386941] env[62522]: DEBUG oslo_concurrency.lockutils [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "refresh_cache-19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 828.455699] env[62522]: DEBUG nova.compute.manager [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 828.535521] env[62522]: DEBUG nova.compute.manager [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 828.565456] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52daec89-fda9-527c-ae6e-55a3eb0901cd, 'name': SearchDatastore_Task, 'duration_secs': 0.017162} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.570121] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 828.570121] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 5ed51dce-2a56-4389-acf8-280bd93ff5f0/5ed51dce-2a56-4389-acf8-280bd93ff5f0.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 828.574182] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f1f714dd-ddda-4773-a008-7f65cd3b3705 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.585828] env[62522]: DEBUG oslo_vmware.api [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415497, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.328548} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.587168] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 828.587296] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 828.587502] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 828.587706] env[62522]: INFO nova.compute.manager [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Took 1.17 seconds to destroy the instance on the hypervisor. [ 828.587977] env[62522]: DEBUG oslo.service.loopingcall [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 828.588459] env[62522]: DEBUG nova.compute.manager [-] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 828.588566] env[62522]: DEBUG nova.network.neutron [-] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 828.594351] env[62522]: DEBUG oslo_vmware.api [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415493, 'name': PowerOnVM_Task, 'duration_secs': 1.085796} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.594565] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 828.594565] env[62522]: value = "task-2415498" [ 828.594565] env[62522]: _type = "Task" [ 828.594565] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.595103] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 828.595473] env[62522]: INFO nova.compute.manager [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Took 9.67 seconds to spawn the instance on the hypervisor. [ 828.595473] env[62522]: DEBUG nova.compute.manager [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 828.598546] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-029a82df-2b96-49bf-a410-6cd8138ccebb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.620568] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415498, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.626038] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Acquiring lock "a185273e-cdaf-4967-832b-f75014b7b3f4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.626038] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Lock "a185273e-cdaf-4967-832b-f75014b7b3f4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.626195] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Acquiring lock "a185273e-cdaf-4967-832b-f75014b7b3f4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.626384] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Lock "a185273e-cdaf-4967-832b-f75014b7b3f4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.626559] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Lock "a185273e-cdaf-4967-832b-f75014b7b3f4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.628627] env[62522]: INFO nova.compute.manager [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Terminating instance [ 828.657175] env[62522]: DEBUG oslo_vmware.api [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415495, 'name': CreateSnapshot_Task, 'duration_secs': 0.807763} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.657473] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Created Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 828.658837] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b11b3db2-bd75-469b-bb0b-98f20bf7141b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.699574] env[62522]: DEBUG nova.network.neutron [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Updating instance_info_cache with network_info: [{"id": "71039daa-ce8b-462d-b9f3-8e07f9ec2666", "address": "fa:16:3e:d4:1b:d8", "network": {"id": "6cb7cdaa-cbcd-4565-a222-310242ff25b1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-684499993-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e47f8c538134439d8405e2825ad0af22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0746f464-a938-427b-ba02-600449df5070", "external-id": "nsx-vlan-transportzone-881", "segmentation_id": 881, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71039daa-ce", "ovs_interfaceid": "71039daa-ce8b-462d-b9f3-8e07f9ec2666", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.890970] env[62522]: DEBUG oslo_concurrency.lockutils [None req-39c05041-4a1f-4eeb-8032-42f8c82a3e1f tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "interface-19d3d54c-5ba1-420f-b012-a08add8546c9-6f83c77d-45cc-446e-8a38-eb8a94e38f59" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 11.038s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.038724] env[62522]: DEBUG nova.network.neutron [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Successfully created port: 0436d465-681f-4cf5-b0e7-496837ecf964 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 829.075024] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.109269] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415498, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.135313] env[62522]: DEBUG nova.compute.manager [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 829.135487] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 829.137260] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f122355-5674-42c1-a51f-55465566a1c2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.140497] env[62522]: INFO nova.compute.manager [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Took 55.16 seconds to build instance. [ 829.150693] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 829.154725] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9243095c-4914-4ca7-81ee-90784b8336e6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.162387] env[62522]: DEBUG oslo_vmware.api [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Waiting for the task: (returnval){ [ 829.162387] env[62522]: value = "task-2415499" [ 829.162387] env[62522]: _type = "Task" [ 829.162387] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.178029] env[62522]: DEBUG oslo_vmware.api [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415499, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.183364] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Creating linked-clone VM from snapshot {{(pid=62522) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 829.186800] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a4e45d24-b232-449b-b026-692221989a59 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.196510] env[62522]: DEBUG oslo_vmware.api [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Waiting for the task: (returnval){ [ 829.196510] env[62522]: value = "task-2415500" [ 829.196510] env[62522]: _type = "Task" [ 829.196510] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.204927] env[62522]: DEBUG oslo_vmware.api [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415500, 'name': CloneVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.208917] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Releasing lock "refresh_cache-74e663b1-b552-4b71-aa74-308e908d79e7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.209269] env[62522]: DEBUG nova.compute.manager [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Instance network_info: |[{"id": "71039daa-ce8b-462d-b9f3-8e07f9ec2666", "address": "fa:16:3e:d4:1b:d8", "network": {"id": "6cb7cdaa-cbcd-4565-a222-310242ff25b1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-684499993-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e47f8c538134439d8405e2825ad0af22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0746f464-a938-427b-ba02-600449df5070", "external-id": "nsx-vlan-transportzone-881", "segmentation_id": 881, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71039daa-ce", "ovs_interfaceid": "71039daa-ce8b-462d-b9f3-8e07f9ec2666", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 829.209932] env[62522]: DEBUG 
nova.virt.vmwareapi.vmops [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:1b:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0746f464-a938-427b-ba02-600449df5070', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '71039daa-ce8b-462d-b9f3-8e07f9ec2666', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 829.218943] env[62522]: DEBUG oslo.service.loopingcall [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 829.219254] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 829.219609] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3adbb179-919e-4304-a5de-ec5eb6cd9a93 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.239707] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9842e61-273c-47cd-9f56-6fbc3d39a0d0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.247655] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 829.247655] env[62522]: value = "task-2415501" [ 829.247655] env[62522]: _type = "Task" [ 829.247655] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.254602] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f71348b9-57f8-459d-a3fe-a7d70ee43568 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.263907] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415501, 'name': CreateVM_Task} progress is 6%. 
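The two entries above show the same port twice: first as the neutron-style network_info blob, then reduced to the much smaller "Instance VIF info" dict (network_name, mac_address, OpaqueNetwork reference, iface_id, vif_model) that the VMware VM builder consumes. The helper below is an illustrative sketch of that reduction using the field names visible in the log; it is not nova's actual VIF translation code.

# Illustrative sketch only: reduce a neutron network_info entry like the one
# logged above to the VIF info fields vmops reports before building the VM.
def vif_info_from_network_info(vif, vif_model='vmxnet3'):
    details = vif.get('details', {})
    return {
        'network_name': vif['network']['bridge'],          # e.g. 'br-int'
        'mac_address': vif['address'],                      # e.g. 'fa:16:3e:d4:1b:d8'
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': details.get('nsx-logical-switch-id'),
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': vif['id'],
        'vif_model': vif_model,
    }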
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.293302] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0493cd59-928c-44eb-a51d-4920f8a6bf0e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.301463] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f1e6c71-a19d-47bc-bee9-e60461d47d80 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.316552] env[62522]: DEBUG nova.compute.provider_tree [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 829.471048] env[62522]: DEBUG nova.compute.manager [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 829.503124] env[62522]: DEBUG nova.virt.hardware [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 829.503392] env[62522]: DEBUG nova.virt.hardware [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 829.504091] env[62522]: DEBUG nova.virt.hardware [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 829.504091] env[62522]: DEBUG nova.virt.hardware [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 829.504091] env[62522]: 
DEBUG nova.virt.hardware [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 829.504091] env[62522]: DEBUG nova.virt.hardware [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 829.504284] env[62522]: DEBUG nova.virt.hardware [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 829.504390] env[62522]: DEBUG nova.virt.hardware [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 829.504544] env[62522]: DEBUG nova.virt.hardware [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 829.504700] env[62522]: DEBUG nova.virt.hardware [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 829.504872] env[62522]: DEBUG nova.virt.hardware [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 829.505802] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd173ea-c0da-4564-8a3d-ec44712df4bb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.514175] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d27153-d3e5-4f39-aa0b-cff552338bd6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.606153] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415498, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.848524} completed successfully. 
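The nova.virt.hardware lines above enumerate CPU topologies for a 1-vCPU flavor with no explicit limits and end up with the single candidate 1:1:1. The snippet below is a simplified stand-in for that enumeration (it is not the real nova.virt.hardware algorithm): it lists every sockets/cores/threads factorization of the vCPU count under the given maxima, which for one vCPU reproduces the logged result.

# Rough sketch of the CPU-topology enumeration described in the log: for
# 1 vCPU with default limits the only candidate is sockets=1, cores=1, threads=1.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))   # [(1, 1, 1)], matching "Got 1 possible topologies"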
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.606711] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 5ed51dce-2a56-4389-acf8-280bd93ff5f0/5ed51dce-2a56-4389-acf8-280bd93ff5f0.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 829.606711] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 829.607045] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7617a220-c24b-4f54-af13-5769ad22466b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.616925] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 829.616925] env[62522]: value = "task-2415502" [ 829.616925] env[62522]: _type = "Task" [ 829.616925] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.628559] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415502, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.643651] env[62522]: DEBUG oslo_concurrency.lockutils [None req-25e3ad6c-3edd-4aa9-96b0-4fc6e15bcb90 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Lock "76cb551e-e605-4c80-a6ef-e36681fc0bc2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 99.976s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.676482] env[62522]: DEBUG oslo_vmware.api [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415499, 'name': PowerOffVM_Task, 'duration_secs': 0.501668} completed successfully. 
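The lockutils entries in this log report both how long a caller waited to acquire a named lock and how long it was held (for example "held 99.976s" above). The context manager below is a pure-stdlib sketch of that bookkeeping, not oslo_concurrency.lockutils itself; the lock object and name are supplied by the caller.

# Sketch of the "waited ... / held ..." timing the lockutils lines report:
# measure the time spent waiting to acquire a lock and the time it is held.
import contextlib
import threading
import time

@contextlib.contextmanager
def timed_lock(lock: threading.Lock, name: str):
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t0 - waited
        print(f'Lock "{name}" released :: held {held:.3f}s')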
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.676831] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 829.676990] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 829.677355] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a2b06295-a6d8-4060-8e4f-8c7ad01d9620 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.708261] env[62522]: DEBUG oslo_vmware.api [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415500, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.758119] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415501, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.806529] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquiring lock "04a9d357-d094-487b-8f09-2f7e0c35f0d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.806529] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "04a9d357-d094-487b-8f09-2f7e0c35f0d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.823398] env[62522]: DEBUG nova.scheduler.client.report [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 829.892848] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] 
[instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 829.892848] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 829.892848] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Deleting the datastore file [datastore2] a185273e-cdaf-4967-832b-f75014b7b3f4 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 829.892848] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c6107c09-e04c-4e36-a671-a54f2aa52f04 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.899261] env[62522]: DEBUG oslo_vmware.api [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Waiting for the task: (returnval){ [ 829.899261] env[62522]: value = "task-2415504" [ 829.899261] env[62522]: _type = "Task" [ 829.899261] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.909160] env[62522]: DEBUG oslo_vmware.api [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415504, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.061663] env[62522]: DEBUG nova.compute.manager [req-6ab258b3-51a4-44ff-97c9-669b0d0a11a0 req-008a30e9-6a73-4dde-9b37-d1e200b262b6 service nova] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Received event network-changed-71039daa-ce8b-462d-b9f3-8e07f9ec2666 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 830.061912] env[62522]: DEBUG nova.compute.manager [req-6ab258b3-51a4-44ff-97c9-669b0d0a11a0 req-008a30e9-6a73-4dde-9b37-d1e200b262b6 service nova] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Refreshing instance network info cache due to event network-changed-71039daa-ce8b-462d-b9f3-8e07f9ec2666. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 830.063021] env[62522]: DEBUG oslo_concurrency.lockutils [req-6ab258b3-51a4-44ff-97c9-669b0d0a11a0 req-008a30e9-6a73-4dde-9b37-d1e200b262b6 service nova] Acquiring lock "refresh_cache-74e663b1-b552-4b71-aa74-308e908d79e7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.063021] env[62522]: DEBUG oslo_concurrency.lockutils [req-6ab258b3-51a4-44ff-97c9-669b0d0a11a0 req-008a30e9-6a73-4dde-9b37-d1e200b262b6 service nova] Acquired lock "refresh_cache-74e663b1-b552-4b71-aa74-308e908d79e7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.063021] env[62522]: DEBUG nova.network.neutron [req-6ab258b3-51a4-44ff-97c9-669b0d0a11a0 req-008a30e9-6a73-4dde-9b37-d1e200b262b6 service nova] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Refreshing network info cache for port 71039daa-ce8b-462d-b9f3-8e07f9ec2666 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 830.128158] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415502, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074851} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.132379] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 830.132379] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f531e7-e1fe-4d19-ae87-62abe60933ec {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.153785] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] 5ed51dce-2a56-4389-acf8-280bd93ff5f0/5ed51dce-2a56-4389-acf8-280bd93ff5f0.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 830.154237] env[62522]: DEBUG nova.compute.manager [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 830.156872] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a685cd39-c8fd-4fe9-854b-5adeea2d1c01 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.181997] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 830.181997] env[62522]: value = "task-2415505" [ 830.181997] env[62522]: _type = "Task" [ 830.181997] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.191439] env[62522]: DEBUG nova.network.neutron [-] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.194600] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415505, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.195801] env[62522]: DEBUG nova.compute.manager [req-0da89114-0115-40f6-bad6-90de655bc61f req-e3a314b6-597e-4381-a5ea-0e7e66a0420b service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Received event network-vif-deleted-bb09cad6-a323-4801-8cb8-7e58b646a38e {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 830.195801] env[62522]: INFO nova.compute.manager [req-0da89114-0115-40f6-bad6-90de655bc61f req-e3a314b6-597e-4381-a5ea-0e7e66a0420b service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Neutron deleted interface bb09cad6-a323-4801-8cb8-7e58b646a38e; detaching it from the instance and deleting it from the info cache [ 830.195801] env[62522]: DEBUG nova.network.neutron [req-0da89114-0115-40f6-bad6-90de655bc61f req-e3a314b6-597e-4381-a5ea-0e7e66a0420b service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.208936] env[62522]: DEBUG oslo_vmware.api [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415500, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.263188] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415501, 'name': CreateVM_Task, 'duration_secs': 0.511877} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.264071] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 830.264168] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.264296] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.264631] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 830.264899] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d77460e6-7b4c-4029-b469-f63e5140e707 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.270828] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 830.270828] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d7e42b-32e2-6e13-3bee-3c4df5f5c9bc" [ 830.270828] env[62522]: _type = "Task" [ 830.270828] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.279385] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d7e42b-32e2-6e13-3bee-3c4df5f5c9bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.327612] env[62522]: DEBUG oslo_concurrency.lockutils [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.892s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.328188] env[62522]: DEBUG nova.compute.manager [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 830.331541] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.242s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.333597] env[62522]: INFO nova.compute.claims [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 830.413834] env[62522]: DEBUG oslo_vmware.api [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415504, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164153} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.414289] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 830.414743] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 830.417030] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 830.417030] env[62522]: INFO nova.compute.manager [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Took 1.28 seconds to destroy the instance on the hypervisor. [ 830.417030] env[62522]: DEBUG oslo.service.loopingcall [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
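After the instance is destroyed on the hypervisor, the log shows the compute manager waiting on "_deallocate_network_with_retries", i.e. the network cleanup is driven by a retrying loop (oslo.service's loopingcall in Nova). The loop below is only a stand-in to show the shape of call-until-success-or-give-up; the attempt count and delay are arbitrary example values.

# Hedged sketch of a retry wrapper like the one behind
# "_deallocate_network_with_retries": keep calling the cleanup function,
# sleeping between attempts, until it succeeds or attempts run out.
import time

def call_with_retries(func, attempts=3, delay=2.0):
    last_exc = None
    for attempt in range(1, attempts + 1):
        try:
            return func()
        except Exception as exc:            # Nova narrows this to specific errors
            last_exc = exc
            if attempt < attempts:
                time.sleep(delay)
    raise last_exc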
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 830.417030] env[62522]: DEBUG nova.compute.manager [-] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 830.417030] env[62522]: DEBUG nova.network.neutron [-] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 830.696937] env[62522]: INFO nova.compute.manager [-] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Took 2.11 seconds to deallocate network for instance. [ 830.697322] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415505, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.699566] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bd558280-63f1-45ca-89dd-d3c8923c318c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.706114] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.717082] env[62522]: DEBUG oslo_vmware.api [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415500, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.721764] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ddd8b5-22fa-4ba0-a6dc-d3f8b4482f76 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.764217] env[62522]: DEBUG nova.compute.manager [req-0da89114-0115-40f6-bad6-90de655bc61f req-e3a314b6-597e-4381-a5ea-0e7e66a0420b service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Detach interface failed, port_id=bb09cad6-a323-4801-8cb8-7e58b646a38e, reason: Instance 19d3d54c-5ba1-420f-b012-a08add8546c9 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 830.793443] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d7e42b-32e2-6e13-3bee-3c4df5f5c9bc, 'name': SearchDatastore_Task, 'duration_secs': 0.010019} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.793912] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.794293] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 830.794669] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.794920] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.795454] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 830.795627] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-39b8d058-0d95-44df-96c9-15c30a543457 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.810326] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 830.810536] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Folder [datastore2] devstack-image-cache_base created. 
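The sequence above (acquire a lock on the cached image path, SearchDatastore_Task to check devstack-image-cache_base, create the cache folder if missing, then CopyVirtualDisk_Task from the cache to the instance folder) is the image-cache-or-fetch pattern. The sketch below mimics that flow with local filesystem stand-ins; the lock, paths and the fetch callable are placeholders, not Nova's real datastore helpers.

# Simplified sketch of the image-cache pattern in the log: under a lock on the
# cached image, check whether the VMDK already exists and only fetch it when
# it does not, then copy it into the instance's own directory.
import os
import shutil
import threading

_cache_lock = threading.Lock()

def ensure_cached_image(image_id, cache_dir, fetch_image):
    cached = os.path.join(cache_dir, image_id, image_id + '.vmdk')
    with _cache_lock:
        if not os.path.exists(cached):          # SearchDatastore_Task analogue
            os.makedirs(os.path.dirname(cached), exist_ok=True)
            fetch_image(cached)                 # download from the image service (stand-in)
    return cached

def clone_for_instance(cached_vmdk, instance_uuid, instances_dir):
    instance_dir = os.path.join(instances_dir, instance_uuid)
    os.makedirs(instance_dir, exist_ok=True)
    target = os.path.join(instance_dir, instance_uuid + '.vmdk')
    shutil.copyfile(cached_vmdk, target)        # CopyVirtualDisk_Task analogue
    return target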
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 830.811299] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05958aca-dbdc-4ecc-b89f-39f43b9ec802 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.816739] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 830.816739] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a96360-3997-f886-96d0-f421e2599004" [ 830.816739] env[62522]: _type = "Task" [ 830.816739] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.829684] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a96360-3997-f886-96d0-f421e2599004, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.838619] env[62522]: DEBUG nova.compute.utils [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 830.844288] env[62522]: DEBUG nova.compute.manager [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 830.844288] env[62522]: DEBUG nova.network.neutron [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 830.929965] env[62522]: DEBUG nova.policy [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a10b77f3502a4e51a5e599b823f08db2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '962664c996f24cf9ae192f79fae18ca4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 831.140930] env[62522]: DEBUG nova.network.neutron [req-6ab258b3-51a4-44ff-97c9-669b0d0a11a0 req-008a30e9-6a73-4dde-9b37-d1e200b262b6 service nova] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Updated VIF entry in instance network info cache for port 71039daa-ce8b-462d-b9f3-8e07f9ec2666. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 831.140930] env[62522]: DEBUG nova.network.neutron [req-6ab258b3-51a4-44ff-97c9-669b0d0a11a0 req-008a30e9-6a73-4dde-9b37-d1e200b262b6 service nova] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Updating instance_info_cache with network_info: [{"id": "71039daa-ce8b-462d-b9f3-8e07f9ec2666", "address": "fa:16:3e:d4:1b:d8", "network": {"id": "6cb7cdaa-cbcd-4565-a222-310242ff25b1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-684499993-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e47f8c538134439d8405e2825ad0af22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0746f464-a938-427b-ba02-600449df5070", "external-id": "nsx-vlan-transportzone-881", "segmentation_id": 881, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71039daa-ce", "ovs_interfaceid": "71039daa-ce8b-462d-b9f3-8e07f9ec2666", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.142899] env[62522]: DEBUG oslo_concurrency.lockutils [None req-05ef92ad-1b6d-4451-a671-f3fa14ec133b tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquiring lock "76cb551e-e605-4c80-a6ef-e36681fc0bc2" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.146121] env[62522]: DEBUG oslo_concurrency.lockutils [None req-05ef92ad-1b6d-4451-a671-f3fa14ec133b tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Lock "76cb551e-e605-4c80-a6ef-e36681fc0bc2" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.146570] env[62522]: INFO nova.compute.manager [None req-05ef92ad-1b6d-4451-a671-f3fa14ec133b tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Rebooting instance [ 831.196547] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415505, 'name': ReconfigVM_Task, 'duration_secs': 0.561336} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.196837] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Reconfigured VM instance instance-00000030 to attach disk [datastore2] 5ed51dce-2a56-4389-acf8-280bd93ff5f0/5ed51dce-2a56-4389-acf8-280bd93ff5f0.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 831.197863] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d8df7313-f0be-45c0-8fc1-02493c2b06a6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.210441] env[62522]: DEBUG oslo_vmware.api [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415500, 'name': CloneVM_Task, 'duration_secs': 1.781045} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.212106] env[62522]: INFO nova.virt.vmwareapi.vmops [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Created linked-clone VM from snapshot [ 831.212307] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 831.212307] env[62522]: value = "task-2415506" [ 831.212307] env[62522]: _type = "Task" [ 831.212307] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.216259] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6997dfea-a5eb-4a75-b81f-e203a17a7455 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.219670] env[62522]: DEBUG oslo_concurrency.lockutils [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.228297] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Uploading image 68ec4e19-e568-4d42-9b01-c03a649009f7 {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 831.233795] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415506, 'name': Rename_Task} progress is 10%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.260978] env[62522]: DEBUG oslo_vmware.rw_handles [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 831.260978] env[62522]: value = "vm-489707" [ 831.260978] env[62522]: _type = "VirtualMachine" [ 831.260978] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 831.261298] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-70442e61-585a-49f1-82cb-6fcfada06e34 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.269490] env[62522]: DEBUG oslo_vmware.rw_handles [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Lease: (returnval){ [ 831.269490] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52249155-2407-5d2e-2512-5fa3e0e321fe" [ 831.269490] env[62522]: _type = "HttpNfcLease" [ 831.269490] env[62522]: } obtained for exporting VM: (result){ [ 831.269490] env[62522]: value = "vm-489707" [ 831.269490] env[62522]: _type = "VirtualMachine" [ 831.269490] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 831.269490] env[62522]: DEBUG oslo_vmware.api [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Waiting for the lease: (returnval){ [ 831.269490] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52249155-2407-5d2e-2512-5fa3e0e321fe" [ 831.269490] env[62522]: _type = "HttpNfcLease" [ 831.269490] env[62522]: } to be ready. {{(pid=62522) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 831.275922] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 831.275922] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52249155-2407-5d2e-2512-5fa3e0e321fe" [ 831.275922] env[62522]: _type = "HttpNfcLease" [ 831.275922] env[62522]: } is initializing. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 831.331336] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a96360-3997-f886-96d0-f421e2599004, 'name': SearchDatastore_Task, 'duration_secs': 0.01441} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.332130] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae3c564a-bf4a-4b28-8647-c0ebf14611c3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.338233] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 831.338233] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]528ff4a9-a3af-45ba-93b3-4480b83cf0c9" [ 831.338233] env[62522]: _type = "Task" [ 831.338233] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.344037] env[62522]: DEBUG nova.compute.manager [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 831.359263] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]528ff4a9-a3af-45ba-93b3-4480b83cf0c9, 'name': SearchDatastore_Task, 'duration_secs': 0.012509} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.359729] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.362588] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 74e663b1-b552-4b71-aa74-308e908d79e7/74e663b1-b552-4b71-aa74-308e908d79e7.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 831.362588] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6e820959-36e4-4cda-adf1-7687f178cf6b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.370109] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 831.370109] env[62522]: value = "task-2415508" [ 831.370109] env[62522]: _type = "Task" [ 831.370109] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.379223] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415508, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.425925] env[62522]: DEBUG nova.network.neutron [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Successfully updated port: 0436d465-681f-4cf5-b0e7-496837ecf964 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 831.650896] env[62522]: DEBUG oslo_concurrency.lockutils [req-6ab258b3-51a4-44ff-97c9-669b0d0a11a0 req-008a30e9-6a73-4dde-9b37-d1e200b262b6 service nova] Releasing lock "refresh_cache-74e663b1-b552-4b71-aa74-308e908d79e7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.677131] env[62522]: DEBUG oslo_concurrency.lockutils [None req-05ef92ad-1b6d-4451-a671-f3fa14ec133b tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquiring lock "refresh_cache-76cb551e-e605-4c80-a6ef-e36681fc0bc2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.677336] env[62522]: DEBUG oslo_concurrency.lockutils [None req-05ef92ad-1b6d-4451-a671-f3fa14ec133b tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquired lock "refresh_cache-76cb551e-e605-4c80-a6ef-e36681fc0bc2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.677506] env[62522]: DEBUG nova.network.neutron [None req-05ef92ad-1b6d-4451-a671-f3fa14ec133b tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 831.733743] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415506, 'name': Rename_Task, 'duration_secs': 0.15959} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.734329] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 831.734680] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dba939df-43c7-4991-8d34-2c437e3686fc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.749835] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 831.749835] env[62522]: value = "task-2415509" [ 831.749835] env[62522]: _type = "Task" [ 831.749835] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.761353] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415509, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.778323] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 831.778323] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52249155-2407-5d2e-2512-5fa3e0e321fe" [ 831.778323] env[62522]: _type = "HttpNfcLease" [ 831.778323] env[62522]: } is ready. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 831.779369] env[62522]: DEBUG oslo_vmware.rw_handles [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 831.779369] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52249155-2407-5d2e-2512-5fa3e0e321fe" [ 831.779369] env[62522]: _type = "HttpNfcLease" [ 831.779369] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 831.780202] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f6c071-e906-4f32-9753-74e8e8e10bd2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.792940] env[62522]: DEBUG oslo_vmware.rw_handles [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52544ed7-e0fc-7852-df21-b4bb521b899c/disk-0.vmdk from lease info. {{(pid=62522) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 831.793182] env[62522]: DEBUG oslo_vmware.rw_handles [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52544ed7-e0fc-7852-df21-b4bb521b899c/disk-0.vmdk for reading. 
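The snapshot-upload path above works through an HttpNfcLease: export the cloned VM, wait for the lease to become ready, read the VMDK URL from the lease info, then stream the disk while periodically updating lease progress (HttpNfcLeaseProgress). The function below is only an illustrative chunked-download sketch with urllib as a stand-in; real code authenticates against vCenter/ESX, uses the proper SSL context, and keeps the lease alive while it reads.

# Illustrative sketch of streaming an exported VMDK URL in chunks, roughly
# what oslo.vmware's read handles do before forwarding the data to Glance.
import urllib.request

def stream_vmdk(url, write_chunk, chunk_size=1024 * 1024):
    transferred = 0
    with urllib.request.urlopen(url) as resp:
        while True:
            chunk = resp.read(chunk_size)
            if not chunk:
                break
            write_chunk(chunk)               # e.g. forward to the image upload
            transferred += len(chunk)
    return transferred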
{{(pid=62522) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 831.882564] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415508, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497886} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.882968] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 74e663b1-b552-4b71-aa74-308e908d79e7/74e663b1-b552-4b71-aa74-308e908d79e7.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 831.883223] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 831.883664] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bead5f86-08d5-432a-9f0f-77d9ad62bba9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.892444] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 831.892444] env[62522]: value = "task-2415510" [ 831.892444] env[62522]: _type = "Task" [ 831.892444] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.898934] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415510, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.914579] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-69c2a006-d107-4d28-bd2e-9dc5fb9a32e7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.927833] env[62522]: DEBUG oslo_concurrency.lockutils [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Acquiring lock "refresh_cache-7a086314-3e49-48e9-82c9-cead8ecb19d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.927833] env[62522]: DEBUG oslo_concurrency.lockutils [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Acquired lock "refresh_cache-7a086314-3e49-48e9-82c9-cead8ecb19d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.927833] env[62522]: DEBUG nova.network.neutron [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 831.965135] env[62522]: DEBUG nova.network.neutron [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Successfully created port: 36fe2fd3-3447-4032-8c02-5be9712b769d {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 832.095969] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dfd8c10-7521-4fdb-aba1-94380f8b44c8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.103926] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac0fff0-eb1b-4382-baee-52c349a09807 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.137680] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de4e314-9470-4a46-b179-e36e97c41147 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.145818] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b535f0e-41c0-4c09-ab08-0a0bb08742d3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.153360] env[62522]: DEBUG nova.compute.manager [req-4daed375-e8c3-47c7-acb8-8d26b04f0c1f req-12436681-6479-4f6c-a6b4-d2ce37583f41 service nova] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Received event network-changed-1e118d2e-4933-4fb5-8582-23601144447f {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 832.153551] env[62522]: DEBUG nova.compute.manager [req-4daed375-e8c3-47c7-acb8-8d26b04f0c1f req-12436681-6479-4f6c-a6b4-d2ce37583f41 service 
nova] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Refreshing instance network info cache due to event network-changed-1e118d2e-4933-4fb5-8582-23601144447f. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 832.153744] env[62522]: DEBUG oslo_concurrency.lockutils [req-4daed375-e8c3-47c7-acb8-8d26b04f0c1f req-12436681-6479-4f6c-a6b4-d2ce37583f41 service nova] Acquiring lock "refresh_cache-76cb551e-e605-4c80-a6ef-e36681fc0bc2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.164850] env[62522]: DEBUG nova.compute.provider_tree [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 832.260319] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415509, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.338145] env[62522]: DEBUG nova.compute.manager [req-f0e2a6d3-3bee-4f23-8f06-085d6dd1170a req-7bc64738-2b11-42c5-9e99-c03521a7dafa service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Received event network-vif-deleted-30356a78-c3a1-4db1-8efa-ccd3f3e4afd1 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 832.338742] env[62522]: INFO nova.compute.manager [req-f0e2a6d3-3bee-4f23-8f06-085d6dd1170a req-7bc64738-2b11-42c5-9e99-c03521a7dafa service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Neutron deleted interface 30356a78-c3a1-4db1-8efa-ccd3f3e4afd1; detaching it from the instance and deleting it from the info cache [ 832.338961] env[62522]: DEBUG nova.network.neutron [req-f0e2a6d3-3bee-4f23-8f06-085d6dd1170a req-7bc64738-2b11-42c5-9e99-c03521a7dafa service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Updating instance_info_cache with network_info: [{"id": "3e725d96-bba9-4651-8fc1-70f66a94b0d1", "address": "fa:16:3e:a2:a8:c7", "network": {"id": "d661d493-fae5-4b41-95f1-78fcf7ded492", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1318459925", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.145", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ca7e42d226a4ef6b48b882356da8950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1fb81f98-6f5a-47ab-a512-27277591d064", "external-id": "nsx-vlan-transportzone-624", "segmentation_id": 624, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e725d96-bb", "ovs_interfaceid": "3e725d96-bba9-4651-8fc1-70f66a94b0d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ded8b2e3-ee39-454b-97d6-5001bdbb8f72", "address": "fa:16:3e:a6:42:a0", "network": {"id": 
"2402eaa3-cb14-4b01-a2ba-0d026901bac0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1525415661", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.202", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "3ca7e42d226a4ef6b48b882356da8950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapded8b2e3-ee", "ovs_interfaceid": "ded8b2e3-ee39-454b-97d6-5001bdbb8f72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.363544] env[62522]: DEBUG nova.compute.manager [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 832.408921] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415510, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.107139} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.411670] env[62522]: DEBUG nova.virt.hardware [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 832.412191] env[62522]: DEBUG nova.virt.hardware [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 832.415874] env[62522]: DEBUG nova.virt.hardware [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 832.416133] env[62522]: DEBUG nova.virt.hardware [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 832.416289] env[62522]: DEBUG nova.virt.hardware [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 832.416444] env[62522]: DEBUG nova.virt.hardware [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 832.416667] env[62522]: DEBUG nova.virt.hardware [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 832.416827] env[62522]: DEBUG nova.virt.hardware [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 832.417429] env[62522]: DEBUG nova.virt.hardware [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 832.417429] env[62522]: DEBUG nova.virt.hardware [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 832.417429] env[62522]: DEBUG nova.virt.hardware [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 832.417982] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 832.418725] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77822b40-28fd-4841-8b3e-2e7e129113bc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.423415] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce82ad1-0882-470d-be9c-3f5cd7c11e96 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.453503] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd76282d-791a-44dc-a384-6dc41853113c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.467348] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Reconfiguring VM instance instance-00000031 to attach disk [datastore2] 74e663b1-b552-4b71-aa74-308e908d79e7/74e663b1-b552-4b71-aa74-308e908d79e7.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 832.474426] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91b5320e-2806-4fa6-b819-1b07b6daccd0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.499220] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 832.499220] env[62522]: value = "task-2415511" [ 832.499220] env[62522]: _type = "Task" [ 832.499220] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.507705] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415511, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.528506] env[62522]: DEBUG nova.network.neutron [-] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.551926] env[62522]: DEBUG nova.network.neutron [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 832.586693] env[62522]: DEBUG nova.network.neutron [None req-05ef92ad-1b6d-4451-a671-f3fa14ec133b tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Updating instance_info_cache with network_info: [{"id": "1e118d2e-4933-4fb5-8582-23601144447f", "address": "fa:16:3e:66:01:ad", "network": {"id": "27951c52-e28e-4c94-968c-c1b5ddd6b58b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1545103257-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec421e0535f04c2ba17759e8342e1897", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e118d2e-49", "ovs_interfaceid": "1e118d2e-4933-4fb5-8582-23601144447f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.669124] env[62522]: DEBUG nova.scheduler.client.report [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 832.763488] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 
tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415509, 'name': PowerOnVM_Task, 'duration_secs': 0.514293} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.764403] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 832.764403] env[62522]: INFO nova.compute.manager [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Took 11.17 seconds to spawn the instance on the hypervisor. [ 832.764563] env[62522]: DEBUG nova.compute.manager [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 832.765640] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca97f866-5b4f-4b69-8066-711d8a65fd01 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.841701] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-016edbd9-8e0d-41d3-a7a5-f0a8e414bac7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.853316] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a42db5ee-c366-4d62-add8-b0822136ce4f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.886847] env[62522]: DEBUG nova.compute.manager [req-f0e2a6d3-3bee-4f23-8f06-085d6dd1170a req-7bc64738-2b11-42c5-9e99-c03521a7dafa service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Detach interface failed, port_id=30356a78-c3a1-4db1-8efa-ccd3f3e4afd1, reason: Instance a185273e-cdaf-4967-832b-f75014b7b3f4 could not be found. 
{{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 832.887097] env[62522]: DEBUG nova.compute.manager [req-f0e2a6d3-3bee-4f23-8f06-085d6dd1170a req-7bc64738-2b11-42c5-9e99-c03521a7dafa service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Received event network-vif-deleted-3e725d96-bba9-4651-8fc1-70f66a94b0d1 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 832.887374] env[62522]: INFO nova.compute.manager [req-f0e2a6d3-3bee-4f23-8f06-085d6dd1170a req-7bc64738-2b11-42c5-9e99-c03521a7dafa service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Neutron deleted interface 3e725d96-bba9-4651-8fc1-70f66a94b0d1; detaching it from the instance and deleting it from the info cache [ 832.887719] env[62522]: DEBUG nova.network.neutron [req-f0e2a6d3-3bee-4f23-8f06-085d6dd1170a req-7bc64738-2b11-42c5-9e99-c03521a7dafa service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Updating instance_info_cache with network_info: [{"id": "ded8b2e3-ee39-454b-97d6-5001bdbb8f72", "address": "fa:16:3e:a6:42:a0", "network": {"id": "2402eaa3-cb14-4b01-a2ba-0d026901bac0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1525415661", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.202", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "3ca7e42d226a4ef6b48b882356da8950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapded8b2e3-ee", "ovs_interfaceid": "ded8b2e3-ee39-454b-97d6-5001bdbb8f72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.906299] env[62522]: DEBUG nova.network.neutron [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Updating instance_info_cache with network_info: [{"id": "0436d465-681f-4cf5-b0e7-496837ecf964", "address": "fa:16:3e:76:52:a9", "network": {"id": "b9ea9e3f-83bc-463d-af04-cc9483811abe", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1832272461-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23589532e76f45e1aa7c1f48a2022a19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap0436d465-68", "ovs_interfaceid": "0436d465-681f-4cf5-b0e7-496837ecf964", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.009993] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415511, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.030688] env[62522]: INFO nova.compute.manager [-] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Took 2.61 seconds to deallocate network for instance. [ 833.093971] env[62522]: DEBUG oslo_concurrency.lockutils [None req-05ef92ad-1b6d-4451-a671-f3fa14ec133b tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Releasing lock "refresh_cache-76cb551e-e605-4c80-a6ef-e36681fc0bc2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.095808] env[62522]: DEBUG oslo_concurrency.lockutils [req-4daed375-e8c3-47c7-acb8-8d26b04f0c1f req-12436681-6479-4f6c-a6b4-d2ce37583f41 service nova] Acquired lock "refresh_cache-76cb551e-e605-4c80-a6ef-e36681fc0bc2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.099648] env[62522]: DEBUG nova.network.neutron [req-4daed375-e8c3-47c7-acb8-8d26b04f0c1f req-12436681-6479-4f6c-a6b4-d2ce37583f41 service nova] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Refreshing network info cache for port 1e118d2e-4933-4fb5-8582-23601144447f {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 833.173818] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.842s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.174397] env[62522]: DEBUG nova.compute.manager [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 833.180188] env[62522]: DEBUG oslo_concurrency.lockutils [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 36.423s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.181338] env[62522]: DEBUG nova.objects.instance [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62522) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 833.290581] env[62522]: INFO nova.compute.manager [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Took 55.12 seconds to build instance. [ 833.390532] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f4944eac-a00b-41c5-b1f7-9859b2aed702 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.401981] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-002f4304-57c1-43dd-8a92-aef837b8538f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.415040] env[62522]: DEBUG oslo_concurrency.lockutils [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Releasing lock "refresh_cache-7a086314-3e49-48e9-82c9-cead8ecb19d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.415040] env[62522]: DEBUG nova.compute.manager [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Instance network_info: |[{"id": "0436d465-681f-4cf5-b0e7-496837ecf964", "address": "fa:16:3e:76:52:a9", "network": {"id": "b9ea9e3f-83bc-463d-af04-cc9483811abe", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1832272461-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23589532e76f45e1aa7c1f48a2022a19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0436d465-68", "ovs_interfaceid": "0436d465-681f-4cf5-b0e7-496837ecf964", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 833.415040] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:52:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d2742ba-c3af-4412-877d-c2811dfeba46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0436d465-681f-4cf5-b0e7-496837ecf964', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 833.422211] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Creating folder: Project (23589532e76f45e1aa7c1f48a2022a19). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 833.422985] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0633b4e0-5403-4b05-826e-e2e63fb7b2aa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.433349] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Created folder: Project (23589532e76f45e1aa7c1f48a2022a19) in parent group-v489562. [ 833.433616] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Creating folder: Instances. Parent ref: group-v489709. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 833.446848] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e0ae041b-e4a8-4955-beb6-58366214c0c3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.449317] env[62522]: DEBUG nova.compute.manager [req-f0e2a6d3-3bee-4f23-8f06-085d6dd1170a req-7bc64738-2b11-42c5-9e99-c03521a7dafa service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Detach interface failed, port_id=3e725d96-bba9-4651-8fc1-70f66a94b0d1, reason: Instance a185273e-cdaf-4967-832b-f75014b7b3f4 could not be found. 
{{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 833.449698] env[62522]: DEBUG nova.compute.manager [req-f0e2a6d3-3bee-4f23-8f06-085d6dd1170a req-7bc64738-2b11-42c5-9e99-c03521a7dafa service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Received event network-vif-deleted-ded8b2e3-ee39-454b-97d6-5001bdbb8f72 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 833.449852] env[62522]: INFO nova.compute.manager [req-f0e2a6d3-3bee-4f23-8f06-085d6dd1170a req-7bc64738-2b11-42c5-9e99-c03521a7dafa service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Neutron deleted interface ded8b2e3-ee39-454b-97d6-5001bdbb8f72; detaching it from the instance and deleting it from the info cache [ 833.450107] env[62522]: DEBUG nova.network.neutron [req-f0e2a6d3-3bee-4f23-8f06-085d6dd1170a req-7bc64738-2b11-42c5-9e99-c03521a7dafa service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.459747] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Created folder: Instances in parent group-v489709. [ 833.460972] env[62522]: DEBUG oslo.service.loopingcall [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 833.460972] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 833.461087] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e8a7ae2b-b8de-4441-b217-1ff425624317 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.483554] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 833.483554] env[62522]: value = "task-2415514" [ 833.483554] env[62522]: _type = "Task" [ 833.483554] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.492314] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415514, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.509291] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415511, 'name': ReconfigVM_Task, 'duration_secs': 0.627401} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.510125] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Reconfigured VM instance instance-00000031 to attach disk [datastore2] 74e663b1-b552-4b71-aa74-308e908d79e7/74e663b1-b552-4b71-aa74-308e908d79e7.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 833.510465] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-be71ce10-bc6e-4e67-b56a-3aa690ef01a6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.517479] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 833.517479] env[62522]: value = "task-2415515" [ 833.517479] env[62522]: _type = "Task" [ 833.517479] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.528073] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415515, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.541429] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.608387] env[62522]: DEBUG nova.compute.manager [None req-05ef92ad-1b6d-4451-a671-f3fa14ec133b tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 833.609047] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d5bb4cc-dc0b-4007-b0d5-bff2ac226057 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.685406] env[62522]: DEBUG nova.compute.utils [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 833.690223] env[62522]: DEBUG nova.compute.manager [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 833.690384] env[62522]: DEBUG nova.network.neutron [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 833.756408] env[62522]: DEBUG nova.policy [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aef4d9ff8c95414a8c680ca612baa660', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '04ba6295b89743a184cc64343ac6bbaf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 833.793425] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "5ed51dce-2a56-4389-acf8-280bd93ff5f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.358s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.845547] env[62522]: DEBUG nova.network.neutron [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Successfully updated port: 36fe2fd3-3447-4032-8c02-5be9712b769d {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 833.964364] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01f555bb-0d84-488d-b38b-d2164fccfb7c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.972172] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d20ae1a6-0e62-4916-a3ea-816a141ad0f0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.999228] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415514, 'name': CreateVM_Task, 'duration_secs': 0.416766} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.999408] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 834.000230] env[62522]: DEBUG oslo_concurrency.lockutils [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.000359] env[62522]: DEBUG oslo_concurrency.lockutils [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.000734] env[62522]: DEBUG oslo_concurrency.lockutils [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 834.014903] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-160514fb-29f1-4e4e-8194-5e2321e5bfdb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.016954] env[62522]: DEBUG nova.compute.manager [req-f0e2a6d3-3bee-4f23-8f06-085d6dd1170a req-7bc64738-2b11-42c5-9e99-c03521a7dafa service nova] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Detach interface failed, port_id=ded8b2e3-ee39-454b-97d6-5001bdbb8f72, reason: Instance a185273e-cdaf-4967-832b-f75014b7b3f4 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 834.022944] env[62522]: DEBUG oslo_vmware.api [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Waiting for the task: (returnval){ [ 834.022944] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524d1288-251b-5306-d77e-d486ff8bf3f6" [ 834.022944] env[62522]: _type = "Task" [ 834.022944] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.029822] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415515, 'name': Rename_Task, 'duration_secs': 0.214069} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.030511] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 834.030777] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2a8d4553-4642-4c20-9fa8-dd3a81d0c5fd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.036255] env[62522]: DEBUG oslo_vmware.api [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524d1288-251b-5306-d77e-d486ff8bf3f6, 'name': SearchDatastore_Task, 'duration_secs': 0.009963} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.037181] env[62522]: DEBUG nova.network.neutron [req-4daed375-e8c3-47c7-acb8-8d26b04f0c1f req-12436681-6479-4f6c-a6b4-d2ce37583f41 service nova] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Updated VIF entry in instance network info cache for port 1e118d2e-4933-4fb5-8582-23601144447f. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 834.037480] env[62522]: DEBUG nova.network.neutron [req-4daed375-e8c3-47c7-acb8-8d26b04f0c1f req-12436681-6479-4f6c-a6b4-d2ce37583f41 service nova] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Updating instance_info_cache with network_info: [{"id": "1e118d2e-4933-4fb5-8582-23601144447f", "address": "fa:16:3e:66:01:ad", "network": {"id": "27951c52-e28e-4c94-968c-c1b5ddd6b58b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1545103257-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec421e0535f04c2ba17759e8342e1897", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e118d2e-49", "ovs_interfaceid": "1e118d2e-4933-4fb5-8582-23601144447f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.039180] env[62522]: DEBUG oslo_concurrency.lockutils [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
834.039352] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 834.039583] env[62522]: DEBUG oslo_concurrency.lockutils [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.039734] env[62522]: DEBUG oslo_concurrency.lockutils [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.039977] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 834.040448] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c370b12e-9b96-442b-b87b-5568e0d63637 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.044042] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 834.044042] env[62522]: value = "task-2415516" [ 834.044042] env[62522]: _type = "Task" [ 834.044042] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.050786] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 834.050786] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 834.054424] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-854aebe7-1420-4dcf-bae6-ceabd3f44bda {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.057580] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415516, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.060534] env[62522]: DEBUG oslo_vmware.api [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Waiting for the task: (returnval){ [ 834.060534] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52526583-0e1f-c62f-85a3-633b4dd514c7" [ 834.060534] env[62522]: _type = "Task" [ 834.060534] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.070251] env[62522]: DEBUG oslo_vmware.api [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52526583-0e1f-c62f-85a3-633b4dd514c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.190636] env[62522]: DEBUG nova.compute.manager [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 834.194173] env[62522]: DEBUG oslo_concurrency.lockutils [None req-561dd5e5-dfd5-4422-a06c-a68445f4e8df tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.195554] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.686s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.197076] env[62522]: INFO nova.compute.claims [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 834.211435] env[62522]: DEBUG nova.network.neutron [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Successfully created port: 36b110ee-cabf-4e98-b183-605196991aec {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 834.296736] env[62522]: DEBUG nova.compute.manager [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 834.347913] env[62522]: DEBUG oslo_concurrency.lockutils [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "refresh_cache-bf44e269-0297-473e-b6ce-04a40d0ec1b4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.348096] env[62522]: DEBUG oslo_concurrency.lockutils [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquired lock "refresh_cache-bf44e269-0297-473e-b6ce-04a40d0ec1b4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.348274] env[62522]: DEBUG nova.network.neutron [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 834.541991] env[62522]: DEBUG oslo_concurrency.lockutils [req-4daed375-e8c3-47c7-acb8-8d26b04f0c1f req-12436681-6479-4f6c-a6b4-d2ce37583f41 service nova] Releasing lock "refresh_cache-76cb551e-e605-4c80-a6ef-e36681fc0bc2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.542321] env[62522]: DEBUG nova.compute.manager [req-4daed375-e8c3-47c7-acb8-8d26b04f0c1f req-12436681-6479-4f6c-a6b4-d2ce37583f41 service nova] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Received event network-vif-plugged-0436d465-681f-4cf5-b0e7-496837ecf964 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 834.542529] env[62522]: DEBUG oslo_concurrency.lockutils [req-4daed375-e8c3-47c7-acb8-8d26b04f0c1f req-12436681-6479-4f6c-a6b4-d2ce37583f41 service nova] Acquiring lock "7a086314-3e49-48e9-82c9-cead8ecb19d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.542737] env[62522]: DEBUG oslo_concurrency.lockutils [req-4daed375-e8c3-47c7-acb8-8d26b04f0c1f req-12436681-6479-4f6c-a6b4-d2ce37583f41 service nova] Lock "7a086314-3e49-48e9-82c9-cead8ecb19d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.542899] env[62522]: DEBUG oslo_concurrency.lockutils [req-4daed375-e8c3-47c7-acb8-8d26b04f0c1f req-12436681-6479-4f6c-a6b4-d2ce37583f41 service nova] Lock "7a086314-3e49-48e9-82c9-cead8ecb19d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.543092] env[62522]: DEBUG nova.compute.manager [req-4daed375-e8c3-47c7-acb8-8d26b04f0c1f req-12436681-6479-4f6c-a6b4-d2ce37583f41 service nova] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] No waiting events found dispatching network-vif-plugged-0436d465-681f-4cf5-b0e7-496837ecf964 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 834.543268] env[62522]: 
WARNING nova.compute.manager [req-4daed375-e8c3-47c7-acb8-8d26b04f0c1f req-12436681-6479-4f6c-a6b4-d2ce37583f41 service nova] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Received unexpected event network-vif-plugged-0436d465-681f-4cf5-b0e7-496837ecf964 for instance with vm_state building and task_state spawning. [ 834.543433] env[62522]: DEBUG nova.compute.manager [req-4daed375-e8c3-47c7-acb8-8d26b04f0c1f req-12436681-6479-4f6c-a6b4-d2ce37583f41 service nova] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Received event network-changed-0436d465-681f-4cf5-b0e7-496837ecf964 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 834.543592] env[62522]: DEBUG nova.compute.manager [req-4daed375-e8c3-47c7-acb8-8d26b04f0c1f req-12436681-6479-4f6c-a6b4-d2ce37583f41 service nova] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Refreshing instance network info cache due to event network-changed-0436d465-681f-4cf5-b0e7-496837ecf964. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 834.543775] env[62522]: DEBUG oslo_concurrency.lockutils [req-4daed375-e8c3-47c7-acb8-8d26b04f0c1f req-12436681-6479-4f6c-a6b4-d2ce37583f41 service nova] Acquiring lock "refresh_cache-7a086314-3e49-48e9-82c9-cead8ecb19d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.543913] env[62522]: DEBUG oslo_concurrency.lockutils [req-4daed375-e8c3-47c7-acb8-8d26b04f0c1f req-12436681-6479-4f6c-a6b4-d2ce37583f41 service nova] Acquired lock "refresh_cache-7a086314-3e49-48e9-82c9-cead8ecb19d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.544087] env[62522]: DEBUG nova.network.neutron [req-4daed375-e8c3-47c7-acb8-8d26b04f0c1f req-12436681-6479-4f6c-a6b4-d2ce37583f41 service nova] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Refreshing network info cache for port 0436d465-681f-4cf5-b0e7-496837ecf964 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 834.560927] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415516, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.573449] env[62522]: DEBUG oslo_vmware.api [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52526583-0e1f-c62f-85a3-633b4dd514c7, 'name': SearchDatastore_Task, 'duration_secs': 0.010038} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.574271] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7022bfa4-9814-4f52-9204-e8641068cf69 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.579871] env[62522]: DEBUG oslo_vmware.api [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Waiting for the task: (returnval){ [ 834.579871] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524543bc-161d-398c-4524-767b259acf11" [ 834.579871] env[62522]: _type = "Task" [ 834.579871] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.591489] env[62522]: DEBUG oslo_vmware.api [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524543bc-161d-398c-4524-767b259acf11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.629322] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-173dca77-b7c8-45ba-a7df-63ba82c88ebe {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.636924] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-05ef92ad-1b6d-4451-a671-f3fa14ec133b tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Doing hard reboot of VM {{(pid=62522) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 834.637221] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-83192320-34c6-4a81-a462-1e5691d8b5e4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.645932] env[62522]: DEBUG oslo_vmware.api [None req-05ef92ad-1b6d-4451-a671-f3fa14ec133b tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Waiting for the task: (returnval){ [ 834.645932] env[62522]: value = "task-2415517" [ 834.645932] env[62522]: _type = "Task" [ 834.645932] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.654713] env[62522]: DEBUG oslo_vmware.api [None req-05ef92ad-1b6d-4451-a671-f3fa14ec133b tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415517, 'name': ResetVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.723038] env[62522]: DEBUG nova.compute.manager [req-09597023-f8d9-4b8b-ac0d-0b1c86599d70 req-27c43206-9352-4c5a-987a-e9a626a66043 service nova] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Received event network-vif-plugged-36fe2fd3-3447-4032-8c02-5be9712b769d {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 834.723038] env[62522]: DEBUG oslo_concurrency.lockutils [req-09597023-f8d9-4b8b-ac0d-0b1c86599d70 req-27c43206-9352-4c5a-987a-e9a626a66043 service nova] Acquiring lock "bf44e269-0297-473e-b6ce-04a40d0ec1b4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.723038] env[62522]: DEBUG oslo_concurrency.lockutils [req-09597023-f8d9-4b8b-ac0d-0b1c86599d70 req-27c43206-9352-4c5a-987a-e9a626a66043 service nova] Lock "bf44e269-0297-473e-b6ce-04a40d0ec1b4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.723038] env[62522]: DEBUG oslo_concurrency.lockutils [req-09597023-f8d9-4b8b-ac0d-0b1c86599d70 req-27c43206-9352-4c5a-987a-e9a626a66043 service nova] Lock "bf44e269-0297-473e-b6ce-04a40d0ec1b4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.723038] env[62522]: DEBUG nova.compute.manager [req-09597023-f8d9-4b8b-ac0d-0b1c86599d70 req-27c43206-9352-4c5a-987a-e9a626a66043 service nova] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] No waiting events found dispatching network-vif-plugged-36fe2fd3-3447-4032-8c02-5be9712b769d {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 834.723907] env[62522]: WARNING nova.compute.manager [req-09597023-f8d9-4b8b-ac0d-0b1c86599d70 req-27c43206-9352-4c5a-987a-e9a626a66043 service nova] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Received unexpected event network-vif-plugged-36fe2fd3-3447-4032-8c02-5be9712b769d for instance with vm_state building and task_state spawning. [ 834.724177] env[62522]: DEBUG nova.compute.manager [req-09597023-f8d9-4b8b-ac0d-0b1c86599d70 req-27c43206-9352-4c5a-987a-e9a626a66043 service nova] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Received event network-changed-36fe2fd3-3447-4032-8c02-5be9712b769d {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 834.724347] env[62522]: DEBUG nova.compute.manager [req-09597023-f8d9-4b8b-ac0d-0b1c86599d70 req-27c43206-9352-4c5a-987a-e9a626a66043 service nova] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Refreshing instance network info cache due to event network-changed-36fe2fd3-3447-4032-8c02-5be9712b769d. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 834.725031] env[62522]: DEBUG oslo_concurrency.lockutils [req-09597023-f8d9-4b8b-ac0d-0b1c86599d70 req-27c43206-9352-4c5a-987a-e9a626a66043 service nova] Acquiring lock "refresh_cache-bf44e269-0297-473e-b6ce-04a40d0ec1b4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.845946] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.912129] env[62522]: DEBUG nova.network.neutron [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 835.059607] env[62522]: DEBUG oslo_vmware.api [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415516, 'name': PowerOnVM_Task, 'duration_secs': 0.831592} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.059921] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 835.060141] env[62522]: INFO nova.compute.manager [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Took 9.75 seconds to spawn the instance on the hypervisor. [ 835.060342] env[62522]: DEBUG nova.compute.manager [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 835.061543] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e40d1d4-36fc-4e66-94b2-f2631da1ecb0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.092971] env[62522]: DEBUG oslo_vmware.api [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524543bc-161d-398c-4524-767b259acf11, 'name': SearchDatastore_Task, 'duration_secs': 0.014442} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.096035] env[62522]: DEBUG oslo_concurrency.lockutils [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.096409] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 7a086314-3e49-48e9-82c9-cead8ecb19d1/7a086314-3e49-48e9-82c9-cead8ecb19d1.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 835.097666] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-039b56fb-a0be-436e-a6e2-5f800e96f085 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.104656] env[62522]: DEBUG oslo_vmware.api [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Waiting for the task: (returnval){ [ 835.104656] env[62522]: value = "task-2415518" [ 835.104656] env[62522]: _type = "Task" [ 835.104656] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.115815] env[62522]: DEBUG oslo_vmware.api [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Task: {'id': task-2415518, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.163365] env[62522]: DEBUG oslo_vmware.api [None req-05ef92ad-1b6d-4451-a671-f3fa14ec133b tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415517, 'name': ResetVM_Task, 'duration_secs': 0.099654} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.163762] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-05ef92ad-1b6d-4451-a671-f3fa14ec133b tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Did hard reboot of VM {{(pid=62522) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 835.164053] env[62522]: DEBUG nova.compute.manager [None req-05ef92ad-1b6d-4451-a671-f3fa14ec133b tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 835.165302] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47340337-38a4-4fc5-87df-fdd262f2a3da {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.187137] env[62522]: DEBUG nova.network.neutron [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Updating instance_info_cache with network_info: [{"id": "36fe2fd3-3447-4032-8c02-5be9712b769d", "address": "fa:16:3e:2e:5d:25", "network": {"id": "5f1d73d1-ff9e-4081-87cf-8df6294f67c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-892212702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "962664c996f24cf9ae192f79fae18ca4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "419a5b3f-4c6f-4168-9def-746b4d8c5c24", "external-id": "nsx-vlan-transportzone-656", "segmentation_id": 656, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36fe2fd3-34", "ovs_interfaceid": "36fe2fd3-3447-4032-8c02-5be9712b769d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.200153] env[62522]: DEBUG nova.compute.manager [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 835.236750] env[62522]: DEBUG nova.virt.hardware [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 835.237030] env[62522]: DEBUG nova.virt.hardware [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 835.237210] env[62522]: DEBUG nova.virt.hardware [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 835.237418] env[62522]: DEBUG nova.virt.hardware [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 835.237614] env[62522]: DEBUG nova.virt.hardware [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 835.237850] env[62522]: DEBUG nova.virt.hardware [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 835.238020] env[62522]: DEBUG nova.virt.hardware [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 835.238199] env[62522]: DEBUG nova.virt.hardware [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 835.238381] env[62522]: DEBUG nova.virt.hardware [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 835.238552] env[62522]: DEBUG nova.virt.hardware [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 835.238748] env[62522]: DEBUG nova.virt.hardware [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 835.240151] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a6380e7-4afd-412c-a6a3-c6d761ec512b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.250722] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d6cbba-5d06-42b5-91be-972b00bd635f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.587427] env[62522]: INFO nova.compute.manager [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Took 55.20 seconds to build instance. [ 835.625993] env[62522]: DEBUG oslo_vmware.api [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Task: {'id': task-2415518, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.685503] env[62522]: DEBUG oslo_concurrency.lockutils [None req-05ef92ad-1b6d-4451-a671-f3fa14ec133b tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Lock "76cb551e-e605-4c80-a6ef-e36681fc0bc2" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.542s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.689414] env[62522]: DEBUG oslo_concurrency.lockutils [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Releasing lock "refresh_cache-bf44e269-0297-473e-b6ce-04a40d0ec1b4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.689766] env[62522]: DEBUG nova.compute.manager [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Instance network_info: |[{"id": "36fe2fd3-3447-4032-8c02-5be9712b769d", "address": "fa:16:3e:2e:5d:25", "network": {"id": "5f1d73d1-ff9e-4081-87cf-8df6294f67c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-892212702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "962664c996f24cf9ae192f79fae18ca4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "419a5b3f-4c6f-4168-9def-746b4d8c5c24", "external-id": "nsx-vlan-transportzone-656", "segmentation_id": 656, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36fe2fd3-34", "ovs_interfaceid": "36fe2fd3-3447-4032-8c02-5be9712b769d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 835.691447] env[62522]: DEBUG nova.network.neutron [req-4daed375-e8c3-47c7-acb8-8d26b04f0c1f req-12436681-6479-4f6c-a6b4-d2ce37583f41 service nova] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Updated VIF entry in instance network info cache for port 0436d465-681f-4cf5-b0e7-496837ecf964. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 835.691447] env[62522]: DEBUG nova.network.neutron [req-4daed375-e8c3-47c7-acb8-8d26b04f0c1f req-12436681-6479-4f6c-a6b4-d2ce37583f41 service nova] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Updating instance_info_cache with network_info: [{"id": "0436d465-681f-4cf5-b0e7-496837ecf964", "address": "fa:16:3e:76:52:a9", "network": {"id": "b9ea9e3f-83bc-463d-af04-cc9483811abe", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1832272461-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23589532e76f45e1aa7c1f48a2022a19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0436d465-68", "ovs_interfaceid": "0436d465-681f-4cf5-b0e7-496837ecf964", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.692574] env[62522]: DEBUG oslo_concurrency.lockutils [req-09597023-f8d9-4b8b-ac0d-0b1c86599d70 req-27c43206-9352-4c5a-987a-e9a626a66043 service nova] Acquired lock "refresh_cache-bf44e269-0297-473e-b6ce-04a40d0ec1b4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.692760] env[62522]: DEBUG nova.network.neutron [req-09597023-f8d9-4b8b-ac0d-0b1c86599d70 req-27c43206-9352-4c5a-987a-e9a626a66043 service nova] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Refreshing network info cache for port 36fe2fd3-3447-4032-8c02-5be9712b769d {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 835.693774] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:5d:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '419a5b3f-4c6f-4168-9def-746b4d8c5c24', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '36fe2fd3-3447-4032-8c02-5be9712b769d', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 835.701869] env[62522]: DEBUG oslo.service.loopingcall [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 835.705371] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 835.705371] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-434aa17e-6619-41b5-bd61-b311c2983c0c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.732048] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 835.732048] env[62522]: value = "task-2415519" [ 835.732048] env[62522]: _type = "Task" [ 835.732048] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.745704] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415519, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.931017] env[62522]: DEBUG nova.network.neutron [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Successfully updated port: 36b110ee-cabf-4e98-b183-605196991aec {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 835.970255] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfda96aa-d26d-4346-9fb5-aa73878d3fd3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.979630] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66cf78a4-31f5-43d0-897b-a68b9b0f8fdb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.010047] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cb77537-4222-4308-972f-d32f4cc38d1d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.018697] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae2bba71-bc89-4a50-bf59-cab1aa085a8e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.034709] env[62522]: DEBUG nova.compute.provider_tree [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 836.090103] env[62522]: DEBUG oslo_concurrency.lockutils [None req-beb2e6f5-92e6-4400-8028-221230deb2b9 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "74e663b1-b552-4b71-aa74-308e908d79e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.629s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.118719] env[62522]: DEBUG oslo_vmware.api [None req-699ba112-b182-4e51-9c74-24a590c46323 
tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Task: {'id': task-2415518, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.681023} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.119017] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 7a086314-3e49-48e9-82c9-cead8ecb19d1/7a086314-3e49-48e9-82c9-cead8ecb19d1.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 836.119244] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 836.119560] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a05a5dd9-8858-46ba-bfd3-9ba5edea86de {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.126356] env[62522]: DEBUG oslo_vmware.api [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Waiting for the task: (returnval){ [ 836.126356] env[62522]: value = "task-2415520" [ 836.126356] env[62522]: _type = "Task" [ 836.126356] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.134754] env[62522]: DEBUG oslo_vmware.api [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Task: {'id': task-2415520, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.203384] env[62522]: DEBUG oslo_concurrency.lockutils [req-4daed375-e8c3-47c7-acb8-8d26b04f0c1f req-12436681-6479-4f6c-a6b4-d2ce37583f41 service nova] Releasing lock "refresh_cache-7a086314-3e49-48e9-82c9-cead8ecb19d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 836.243463] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415519, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.431159] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquiring lock "refresh_cache-41a980df-88a9-4f9b-b34b-905b226c0675" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.431311] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquired lock "refresh_cache-41a980df-88a9-4f9b-b34b-905b226c0675" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.431468] env[62522]: DEBUG nova.network.neutron [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 836.480627] env[62522]: DEBUG nova.network.neutron [req-09597023-f8d9-4b8b-ac0d-0b1c86599d70 req-27c43206-9352-4c5a-987a-e9a626a66043 service nova] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Updated VIF entry in instance network info cache for port 36fe2fd3-3447-4032-8c02-5be9712b769d. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 836.481033] env[62522]: DEBUG nova.network.neutron [req-09597023-f8d9-4b8b-ac0d-0b1c86599d70 req-27c43206-9352-4c5a-987a-e9a626a66043 service nova] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Updating instance_info_cache with network_info: [{"id": "36fe2fd3-3447-4032-8c02-5be9712b769d", "address": "fa:16:3e:2e:5d:25", "network": {"id": "5f1d73d1-ff9e-4081-87cf-8df6294f67c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-892212702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "962664c996f24cf9ae192f79fae18ca4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "419a5b3f-4c6f-4168-9def-746b4d8c5c24", "external-id": "nsx-vlan-transportzone-656", "segmentation_id": 656, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36fe2fd3-34", "ovs_interfaceid": "36fe2fd3-3447-4032-8c02-5be9712b769d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.537827] env[62522]: DEBUG nova.scheduler.client.report [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 836.593161] env[62522]: DEBUG nova.compute.manager [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 836.635981] env[62522]: DEBUG oslo_vmware.api [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Task: {'id': task-2415520, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077733} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.637413] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 836.638246] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c93c1b11-c26c-48e4-b3e1-765c5666b7fb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.661023] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] 7a086314-3e49-48e9-82c9-cead8ecb19d1/7a086314-3e49-48e9-82c9-cead8ecb19d1.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 836.661898] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8708c49b-d2dd-4c91-916a-f711a8c8164b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.682021] env[62522]: DEBUG oslo_vmware.api [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Waiting for the task: (returnval){ [ 836.682021] env[62522]: value = "task-2415521" [ 836.682021] env[62522]: _type = "Task" [ 836.682021] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.690398] env[62522]: DEBUG oslo_vmware.api [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Task: {'id': task-2415521, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.742869] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415519, 'name': CreateVM_Task, 'duration_secs': 0.520064} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.743062] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 836.743756] env[62522]: DEBUG oslo_concurrency.lockutils [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.743916] env[62522]: DEBUG oslo_concurrency.lockutils [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.744268] env[62522]: DEBUG oslo_concurrency.lockutils [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 836.744524] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e805591-440d-42a0-9c8f-5dd13234c8ea {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.749160] env[62522]: DEBUG oslo_vmware.api [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 836.749160] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b06695-3753-54df-be30-72513c3f5a9e" [ 836.749160] env[62522]: _type = "Task" [ 836.749160] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.758685] env[62522]: DEBUG oslo_vmware.api [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b06695-3753-54df-be30-72513c3f5a9e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.966683] env[62522]: DEBUG nova.network.neutron [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 836.984656] env[62522]: DEBUG oslo_concurrency.lockutils [req-09597023-f8d9-4b8b-ac0d-0b1c86599d70 req-27c43206-9352-4c5a-987a-e9a626a66043 service nova] Releasing lock "refresh_cache-bf44e269-0297-473e-b6ce-04a40d0ec1b4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.046536] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.848s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.046536] env[62522]: DEBUG nova.compute.manager [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 837.050200] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.704s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.050503] env[62522]: DEBUG nova.objects.instance [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Lazy-loading 'resources' on Instance uuid 566c207c-5506-4410-98ab-aee9fdbc5d6e {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 837.093321] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Acquiring lock "95e4fe36-6830-4fc4-bb53-1e5643c2f95b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.093609] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Lock "95e4fe36-6830-4fc4-bb53-1e5643c2f95b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.093840] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Acquiring lock "95e4fe36-6830-4fc4-bb53-1e5643c2f95b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.094035] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Lock "95e4fe36-6830-4fc4-bb53-1e5643c2f95b-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.094211] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Lock "95e4fe36-6830-4fc4-bb53-1e5643c2f95b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.096263] env[62522]: INFO nova.compute.manager [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Terminating instance [ 837.123461] env[62522]: DEBUG nova.network.neutron [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Updating instance_info_cache with network_info: [{"id": "36b110ee-cabf-4e98-b183-605196991aec", "address": "fa:16:3e:3e:f0:91", "network": {"id": "896c53ad-3b58-4e2c-89d9-7fa723dc8e79", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-558196866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "04ba6295b89743a184cc64343ac6bbaf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36b110ee-ca", "ovs_interfaceid": "36b110ee-cabf-4e98-b183-605196991aec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.126059] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.192711] env[62522]: DEBUG oslo_vmware.api [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Task: {'id': task-2415521, 'name': ReconfigVM_Task, 'duration_secs': 0.333619} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.192993] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Reconfigured VM instance instance-00000032 to attach disk [datastore1] 7a086314-3e49-48e9-82c9-cead8ecb19d1/7a086314-3e49-48e9-82c9-cead8ecb19d1.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 837.194121] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7dd4109c-bdcb-4a8c-a1ea-193b0cd7c8af {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.200338] env[62522]: DEBUG oslo_vmware.api [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Waiting for the task: (returnval){ [ 837.200338] env[62522]: value = "task-2415522" [ 837.200338] env[62522]: _type = "Task" [ 837.200338] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.208755] env[62522]: DEBUG oslo_vmware.api [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Task: {'id': task-2415522, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.240904] env[62522]: DEBUG nova.compute.manager [req-f0a4e206-8f52-4b99-916c-9655bb0adc7f req-ef4d1793-e4b2-4cd7-8ca9-e7b4e6865aca service nova] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Received event network-vif-plugged-36b110ee-cabf-4e98-b183-605196991aec {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 837.241153] env[62522]: DEBUG oslo_concurrency.lockutils [req-f0a4e206-8f52-4b99-916c-9655bb0adc7f req-ef4d1793-e4b2-4cd7-8ca9-e7b4e6865aca service nova] Acquiring lock "41a980df-88a9-4f9b-b34b-905b226c0675-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.241369] env[62522]: DEBUG oslo_concurrency.lockutils [req-f0a4e206-8f52-4b99-916c-9655bb0adc7f req-ef4d1793-e4b2-4cd7-8ca9-e7b4e6865aca service nova] Lock "41a980df-88a9-4f9b-b34b-905b226c0675-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.241538] env[62522]: DEBUG oslo_concurrency.lockutils [req-f0a4e206-8f52-4b99-916c-9655bb0adc7f req-ef4d1793-e4b2-4cd7-8ca9-e7b4e6865aca service nova] Lock "41a980df-88a9-4f9b-b34b-905b226c0675-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.241730] env[62522]: DEBUG nova.compute.manager [req-f0a4e206-8f52-4b99-916c-9655bb0adc7f req-ef4d1793-e4b2-4cd7-8ca9-e7b4e6865aca service nova] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] No 
waiting events found dispatching network-vif-plugged-36b110ee-cabf-4e98-b183-605196991aec {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 837.241929] env[62522]: WARNING nova.compute.manager [req-f0a4e206-8f52-4b99-916c-9655bb0adc7f req-ef4d1793-e4b2-4cd7-8ca9-e7b4e6865aca service nova] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Received unexpected event network-vif-plugged-36b110ee-cabf-4e98-b183-605196991aec for instance with vm_state building and task_state spawning. [ 837.242112] env[62522]: DEBUG nova.compute.manager [req-f0a4e206-8f52-4b99-916c-9655bb0adc7f req-ef4d1793-e4b2-4cd7-8ca9-e7b4e6865aca service nova] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Received event network-changed-36b110ee-cabf-4e98-b183-605196991aec {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 837.242290] env[62522]: DEBUG nova.compute.manager [req-f0a4e206-8f52-4b99-916c-9655bb0adc7f req-ef4d1793-e4b2-4cd7-8ca9-e7b4e6865aca service nova] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Refreshing instance network info cache due to event network-changed-36b110ee-cabf-4e98-b183-605196991aec. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 837.242651] env[62522]: DEBUG oslo_concurrency.lockutils [req-f0a4e206-8f52-4b99-916c-9655bb0adc7f req-ef4d1793-e4b2-4cd7-8ca9-e7b4e6865aca service nova] Acquiring lock "refresh_cache-41a980df-88a9-4f9b-b34b-905b226c0675" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.259833] env[62522]: DEBUG oslo_vmware.api [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b06695-3753-54df-be30-72513c3f5a9e, 'name': SearchDatastore_Task, 'duration_secs': 0.012035} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.259833] env[62522]: DEBUG oslo_concurrency.lockutils [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.260033] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 837.260922] env[62522]: DEBUG oslo_concurrency.lockutils [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.260922] env[62522]: DEBUG oslo_concurrency.lockutils [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.260922] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 837.260922] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7dcc37f6-6d4d-472a-8427-8c7a3e2a61d4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.270859] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 837.270859] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 837.271554] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96cdde8e-6cb1-454e-a749-8708174bc353 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.276751] env[62522]: DEBUG oslo_vmware.api [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 837.276751] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522ed5a4-d9b6-09f7-5124-107a4cb3cf65" [ 837.276751] env[62522]: _type = "Task" [ 837.276751] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.287257] env[62522]: DEBUG oslo_vmware.api [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522ed5a4-d9b6-09f7-5124-107a4cb3cf65, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.463397] env[62522]: DEBUG nova.compute.manager [req-f558884e-4876-4763-b209-5a1eba586c84 req-b3c2133b-a9d0-4d32-accb-e6f5206e9bed service nova] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Received event network-changed-1e118d2e-4933-4fb5-8582-23601144447f {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 837.463614] env[62522]: DEBUG nova.compute.manager [req-f558884e-4876-4763-b209-5a1eba586c84 req-b3c2133b-a9d0-4d32-accb-e6f5206e9bed service nova] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Refreshing instance network info cache due to event network-changed-1e118d2e-4933-4fb5-8582-23601144447f. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 837.463827] env[62522]: DEBUG oslo_concurrency.lockutils [req-f558884e-4876-4763-b209-5a1eba586c84 req-b3c2133b-a9d0-4d32-accb-e6f5206e9bed service nova] Acquiring lock "refresh_cache-76cb551e-e605-4c80-a6ef-e36681fc0bc2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.463971] env[62522]: DEBUG oslo_concurrency.lockutils [req-f558884e-4876-4763-b209-5a1eba586c84 req-b3c2133b-a9d0-4d32-accb-e6f5206e9bed service nova] Acquired lock "refresh_cache-76cb551e-e605-4c80-a6ef-e36681fc0bc2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.464147] env[62522]: DEBUG nova.network.neutron [req-f558884e-4876-4763-b209-5a1eba586c84 req-b3c2133b-a9d0-4d32-accb-e6f5206e9bed service nova] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Refreshing network info cache for port 1e118d2e-4933-4fb5-8582-23601144447f {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 837.544473] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "5ed51dce-2a56-4389-acf8-280bd93ff5f0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.544473] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "5ed51dce-2a56-4389-acf8-280bd93ff5f0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.544617] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "5ed51dce-2a56-4389-acf8-280bd93ff5f0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.544876] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "5ed51dce-2a56-4389-acf8-280bd93ff5f0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.545123] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "5ed51dce-2a56-4389-acf8-280bd93ff5f0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.547699] env[62522]: INFO nova.compute.manager [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 
tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Terminating instance [ 837.550566] env[62522]: DEBUG nova.compute.utils [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 837.553047] env[62522]: DEBUG nova.compute.manager [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 837.553276] env[62522]: DEBUG nova.network.neutron [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 837.600589] env[62522]: DEBUG nova.compute.manager [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 837.601772] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 837.601949] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b42cd67-e9c4-4976-9232-99c6dc4d3e96 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.611298] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 837.611298] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-00e19ae6-16b5-4a7b-95da-81218a26958d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.621317] env[62522]: DEBUG oslo_vmware.api [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Waiting for the task: (returnval){ [ 837.621317] env[62522]: value = "task-2415523" [ 837.621317] env[62522]: _type = "Task" [ 837.621317] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.625306] env[62522]: DEBUG nova.policy [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9694ee575d094ccf845eb57acf3e70c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '00b27498c07344d1bf9cecefa0fca033', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 837.627551] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Releasing lock "refresh_cache-41a980df-88a9-4f9b-b34b-905b226c0675" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.627906] env[62522]: DEBUG nova.compute.manager [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Instance network_info: |[{"id": "36b110ee-cabf-4e98-b183-605196991aec", "address": "fa:16:3e:3e:f0:91", "network": {"id": "896c53ad-3b58-4e2c-89d9-7fa723dc8e79", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-558196866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "04ba6295b89743a184cc64343ac6bbaf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36b110ee-ca", "ovs_interfaceid": "36b110ee-cabf-4e98-b183-605196991aec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 837.631428] env[62522]: DEBUG oslo_concurrency.lockutils [req-f0a4e206-8f52-4b99-916c-9655bb0adc7f req-ef4d1793-e4b2-4cd7-8ca9-e7b4e6865aca service nova] Acquired lock "refresh_cache-41a980df-88a9-4f9b-b34b-905b226c0675" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.631551] env[62522]: DEBUG nova.network.neutron [req-f0a4e206-8f52-4b99-916c-9655bb0adc7f req-ef4d1793-e4b2-4cd7-8ca9-e7b4e6865aca service nova] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Refreshing network info cache for port 36b110ee-cabf-4e98-b183-605196991aec {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 837.632852] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None 
req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:f0:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '305ccd93-08cb-4658-845c-d9b64952daf7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '36b110ee-cabf-4e98-b183-605196991aec', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 837.641126] env[62522]: DEBUG oslo.service.loopingcall [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 837.648667] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 837.652033] env[62522]: DEBUG oslo_vmware.api [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Task: {'id': task-2415523, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.652033] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d3fc7a6c-526c-43eb-9717-83fc8e8d61aa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.675302] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 837.675302] env[62522]: value = "task-2415524" [ 837.675302] env[62522]: _type = "Task" [ 837.675302] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.684846] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415524, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.712074] env[62522]: DEBUG oslo_vmware.api [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Task: {'id': task-2415522, 'name': Rename_Task, 'duration_secs': 0.143817} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.712298] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 837.712507] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7ac805cc-7517-4439-91da-60741b866cf8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.722558] env[62522]: DEBUG oslo_vmware.api [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Waiting for the task: (returnval){ [ 837.722558] env[62522]: value = "task-2415525" [ 837.722558] env[62522]: _type = "Task" [ 837.722558] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.736993] env[62522]: DEBUG oslo_vmware.api [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Task: {'id': task-2415525, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.790997] env[62522]: DEBUG oslo_vmware.api [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522ed5a4-d9b6-09f7-5124-107a4cb3cf65, 'name': SearchDatastore_Task, 'duration_secs': 0.009423} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.794572] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b5b1d9e-922f-4bf2-b7de-04d665901082 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.803594] env[62522]: DEBUG oslo_vmware.api [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 837.803594] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52926aea-e221-b6b2-6f50-b32dc08d12ca" [ 837.803594] env[62522]: _type = "Task" [ 837.803594] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.820796] env[62522]: DEBUG oslo_vmware.api [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52926aea-e221-b6b2-6f50-b32dc08d12ca, 'name': SearchDatastore_Task, 'duration_secs': 0.014215} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.821129] env[62522]: DEBUG oslo_concurrency.lockutils [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.821412] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] bf44e269-0297-473e-b6ce-04a40d0ec1b4/bf44e269-0297-473e-b6ce-04a40d0ec1b4.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 837.821688] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-31909b5b-1453-48be-a57c-3b1ba571d79b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.829214] env[62522]: DEBUG oslo_vmware.api [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 837.829214] env[62522]: value = "task-2415526" [ 837.829214] env[62522]: _type = "Task" [ 837.829214] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.838922] env[62522]: DEBUG oslo_vmware.api [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415526, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.061202] env[62522]: DEBUG nova.compute.manager [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 838.062013] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 838.062013] env[62522]: DEBUG nova.compute.manager [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 838.065388] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d4c464-2e00-4338-8ecf-8fec1fdd7d3e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.069269] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "74e663b1-b552-4b71-aa74-308e908d79e7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.069633] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "74e663b1-b552-4b71-aa74-308e908d79e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.069789] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "74e663b1-b552-4b71-aa74-308e908d79e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.072711] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "74e663b1-b552-4b71-aa74-308e908d79e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.072854] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "74e663b1-b552-4b71-aa74-308e908d79e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.003s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.078065] env[62522]: INFO nova.compute.manager [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Terminating instance [ 838.084070] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 838.085677] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-11c2612f-42df-4ab6-87e2-7af55edda7fa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
838.092422] env[62522]: DEBUG nova.network.neutron [req-f0a4e206-8f52-4b99-916c-9655bb0adc7f req-ef4d1793-e4b2-4cd7-8ca9-e7b4e6865aca service nova] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Updated VIF entry in instance network info cache for port 36b110ee-cabf-4e98-b183-605196991aec. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 838.092792] env[62522]: DEBUG nova.network.neutron [req-f0a4e206-8f52-4b99-916c-9655bb0adc7f req-ef4d1793-e4b2-4cd7-8ca9-e7b4e6865aca service nova] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Updating instance_info_cache with network_info: [{"id": "36b110ee-cabf-4e98-b183-605196991aec", "address": "fa:16:3e:3e:f0:91", "network": {"id": "896c53ad-3b58-4e2c-89d9-7fa723dc8e79", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-558196866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "04ba6295b89743a184cc64343ac6bbaf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36b110ee-ca", "ovs_interfaceid": "36b110ee-cabf-4e98-b183-605196991aec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.104518] env[62522]: DEBUG oslo_vmware.api [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 838.104518] env[62522]: value = "task-2415527" [ 838.104518] env[62522]: _type = "Task" [ 838.104518] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.118427] env[62522]: DEBUG oslo_vmware.api [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415527, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.133592] env[62522]: DEBUG oslo_vmware.api [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Task: {'id': task-2415523, 'name': PowerOffVM_Task, 'duration_secs': 0.233108} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.134239] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 838.134239] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 838.134370] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-65c60b21-2bfe-4625-9f0f-195d032d8acd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.188669] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415524, 'name': CreateVM_Task, 'duration_secs': 0.417919} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.188751] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 838.190018] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.190018] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.190018] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 838.191000] env[62522]: DEBUG nova.network.neutron [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Successfully created port: d0636bb2-edb5-41e8-a81d-092c355e770c {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 838.192970] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1a53883-161c-499b-89fc-98b94676918a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.205224] env[62522]: DEBUG oslo_vmware.api [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d 
tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Waiting for the task: (returnval){ [ 838.205224] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5203f40b-5d1f-b267-c1fa-2337ef45cfd8" [ 838.205224] env[62522]: _type = "Task" [ 838.205224] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.220535] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 838.221877] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 838.221877] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Deleting the datastore file [datastore1] 95e4fe36-6830-4fc4-bb53-1e5643c2f95b {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 838.221877] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7e064a80-4790-4d34-a883-45585f4a051e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.227314] env[62522]: DEBUG oslo_vmware.api [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5203f40b-5d1f-b267-c1fa-2337ef45cfd8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.235485] env[62522]: DEBUG oslo_vmware.api [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Waiting for the task: (returnval){ [ 838.235485] env[62522]: value = "task-2415529" [ 838.235485] env[62522]: _type = "Task" [ 838.235485] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.242840] env[62522]: DEBUG oslo_vmware.api [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Task: {'id': task-2415525, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.249382] env[62522]: DEBUG oslo_vmware.api [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Task: {'id': task-2415529, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.268654] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd2e760-1a57-4393-b06a-5f83e66cbfc9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.277941] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd6b24a0-f012-4fcc-93c9-9aca9fa1b59e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.283627] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquiring lock "76cb551e-e605-4c80-a6ef-e36681fc0bc2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.283923] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Lock "76cb551e-e605-4c80-a6ef-e36681fc0bc2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.284180] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquiring lock "76cb551e-e605-4c80-a6ef-e36681fc0bc2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.284373] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Lock "76cb551e-e605-4c80-a6ef-e36681fc0bc2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.284543] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Lock "76cb551e-e605-4c80-a6ef-e36681fc0bc2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.286801] env[62522]: INFO nova.compute.manager [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Terminating instance [ 838.326346] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7c8f5d5-151d-4fb9-a798-bd5e31bb52c3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.339570] env[62522]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c0dc7dc-e737-40ea-b128-a36b05f7e75d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.348863] env[62522]: DEBUG oslo_vmware.api [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415526, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.362936] env[62522]: DEBUG nova.compute.provider_tree [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 838.404352] env[62522]: DEBUG nova.network.neutron [req-f558884e-4876-4763-b209-5a1eba586c84 req-b3c2133b-a9d0-4d32-accb-e6f5206e9bed service nova] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Updated VIF entry in instance network info cache for port 1e118d2e-4933-4fb5-8582-23601144447f. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 838.405588] env[62522]: DEBUG nova.network.neutron [req-f558884e-4876-4763-b209-5a1eba586c84 req-b3c2133b-a9d0-4d32-accb-e6f5206e9bed service nova] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Updating instance_info_cache with network_info: [{"id": "1e118d2e-4933-4fb5-8582-23601144447f", "address": "fa:16:3e:66:01:ad", "network": {"id": "27951c52-e28e-4c94-968c-c1b5ddd6b58b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1545103257-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec421e0535f04c2ba17759e8342e1897", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e118d2e-49", "ovs_interfaceid": "1e118d2e-4933-4fb5-8582-23601144447f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.593381] env[62522]: DEBUG nova.compute.manager [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 838.593381] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 838.594403] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2115be1-80df-4765-98d6-2877a8a050f1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.599200] env[62522]: DEBUG oslo_concurrency.lockutils [req-f0a4e206-8f52-4b99-916c-9655bb0adc7f req-ef4d1793-e4b2-4cd7-8ca9-e7b4e6865aca service nova] Releasing lock "refresh_cache-41a980df-88a9-4f9b-b34b-905b226c0675" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.604971] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 838.605512] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8b5093c7-5e8a-4642-8c5d-33adbb51be68 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.617784] env[62522]: DEBUG oslo_vmware.api [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415527, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.619092] env[62522]: DEBUG oslo_vmware.api [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 838.619092] env[62522]: value = "task-2415530" [ 838.619092] env[62522]: _type = "Task" [ 838.619092] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.627325] env[62522]: DEBUG oslo_vmware.api [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415530, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.722151] env[62522]: DEBUG oslo_vmware.api [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5203f40b-5d1f-b267-c1fa-2337ef45cfd8, 'name': SearchDatastore_Task, 'duration_secs': 0.051451} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.722824] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.723189] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 838.723578] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.723844] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.724163] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 838.724503] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ebaa728b-c514-40fc-a1c1-5a5378d19b5b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.739682] env[62522]: DEBUG oslo_vmware.api [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Task: {'id': task-2415525, 'name': PowerOnVM_Task, 'duration_secs': 0.852323} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.739943] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 838.740376] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 838.746081] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 838.746418] env[62522]: INFO nova.compute.manager [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Took 9.28 seconds to spawn the instance on the hypervisor. [ 838.746741] env[62522]: DEBUG nova.compute.manager [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 838.747121] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b72ae67a-90fc-4993-af7d-2cdc5a2089fa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.751766] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db784b1d-b7b3-449b-a60f-4cb6f0be0b5d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.760174] env[62522]: DEBUG oslo_vmware.api [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Waiting for the task: (returnval){ [ 838.760174] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524e8100-845d-9dcf-9ea7-916ac2ac6223" [ 838.760174] env[62522]: _type = "Task" [ 838.760174] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.763803] env[62522]: DEBUG oslo_vmware.api [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Task: {'id': task-2415529, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.380185} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.769772] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 838.770204] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 838.770204] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 838.770380] env[62522]: INFO nova.compute.manager [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Took 1.17 seconds to destroy the instance on the hypervisor. [ 838.770665] env[62522]: DEBUG oslo.service.loopingcall [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 838.771120] env[62522]: DEBUG nova.compute.manager [-] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 838.771243] env[62522]: DEBUG nova.network.neutron [-] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 838.779574] env[62522]: DEBUG oslo_vmware.api [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524e8100-845d-9dcf-9ea7-916ac2ac6223, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.791506] env[62522]: DEBUG nova.compute.manager [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 838.791820] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 838.792738] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e172567-a3d2-43aa-8dab-e566bb6a8780 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.802669] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 838.802948] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e44b0eb2-d679-4a31-a17f-9207c0af14b9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.810985] env[62522]: DEBUG oslo_vmware.api [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Waiting for the task: (returnval){ [ 838.810985] env[62522]: value = "task-2415531" [ 838.810985] env[62522]: _type = "Task" [ 838.810985] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.821337] env[62522]: DEBUG oslo_vmware.api [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415531, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.840621] env[62522]: DEBUG oslo_vmware.api [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415526, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.555317} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.840886] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] bf44e269-0297-473e-b6ce-04a40d0ec1b4/bf44e269-0297-473e-b6ce-04a40d0ec1b4.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 838.841381] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 838.841642] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5845dd44-6cef-4482-8f85-78d136c04b30 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.849330] env[62522]: DEBUG oslo_vmware.api [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 838.849330] env[62522]: value = "task-2415532" [ 838.849330] env[62522]: _type = "Task" [ 838.849330] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.858670] env[62522]: DEBUG oslo_vmware.api [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415532, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.866947] env[62522]: DEBUG nova.scheduler.client.report [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 838.909693] env[62522]: DEBUG oslo_concurrency.lockutils [req-f558884e-4876-4763-b209-5a1eba586c84 req-b3c2133b-a9d0-4d32-accb-e6f5206e9bed service nova] Releasing lock "refresh_cache-76cb551e-e605-4c80-a6ef-e36681fc0bc2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.076326] env[62522]: DEBUG nova.compute.manager [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 839.108013] env[62522]: DEBUG nova.virt.hardware [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 839.109734] env[62522]: DEBUG nova.virt.hardware [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 839.109936] env[62522]: DEBUG nova.virt.hardware [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 839.110147] env[62522]: DEBUG nova.virt.hardware [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 839.110302] env[62522]: DEBUG nova.virt.hardware [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 839.110452] env[62522]: DEBUG nova.virt.hardware [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 839.110784] env[62522]: DEBUG nova.virt.hardware [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 839.111015] env[62522]: DEBUG nova.virt.hardware [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 839.111223] env[62522]: DEBUG nova.virt.hardware [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] 
Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 839.111401] env[62522]: DEBUG nova.virt.hardware [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 839.111581] env[62522]: DEBUG nova.virt.hardware [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 839.112924] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa8385b-df39-4276-822e-80cfc766fc6b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.131873] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20710842-eb68-4827-80c7-7235dfe113e6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.137286] env[62522]: DEBUG oslo_vmware.api [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415527, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.141227] env[62522]: DEBUG oslo_vmware.api [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415530, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.277964] env[62522]: DEBUG oslo_vmware.api [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524e8100-845d-9dcf-9ea7-916ac2ac6223, 'name': SearchDatastore_Task, 'duration_secs': 0.023148} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.278797] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ed8524d-1b88-414f-9d8f-38e59a3f3f20 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.289035] env[62522]: INFO nova.compute.manager [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Took 53.74 seconds to build instance. 
[ 839.289035] env[62522]: DEBUG oslo_vmware.api [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Waiting for the task: (returnval){ [ 839.289035] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521a4ea1-2670-414c-5627-f9c6b05c69d5" [ 839.289035] env[62522]: _type = "Task" [ 839.289035] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.298448] env[62522]: DEBUG oslo_vmware.api [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521a4ea1-2670-414c-5627-f9c6b05c69d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.321409] env[62522]: DEBUG oslo_vmware.api [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415531, 'name': PowerOffVM_Task, 'duration_secs': 0.354666} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.321636] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 839.321849] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 839.322145] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0bad11f5-7ec0-497b-9ba3-9bfa071933cb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.360599] env[62522]: DEBUG oslo_vmware.api [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415532, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.150909} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.360969] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 839.362031] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a9b4397-42f7-44c5-84fc-4cceb88f974e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.380226] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.330s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.391587] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] bf44e269-0297-473e-b6ce-04a40d0ec1b4/bf44e269-0297-473e-b6ce-04a40d0ec1b4.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 839.393327] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.046s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.393579] env[62522]: DEBUG nova.objects.instance [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lazy-loading 'resources' on Instance uuid d6935c9b-e4cc-47ed-96d5-e485d60382d6 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 839.395887] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d344a9d-7c64-43dd-ae73-1ca36272525d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.415908] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 839.416453] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 839.416453] env[62522]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Deleting the datastore file [datastore2] 76cb551e-e605-4c80-a6ef-e36681fc0bc2 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 839.417549] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2df3a3b7-9b11-47a5-a7c8-9ed1bb894483 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.420417] env[62522]: INFO nova.scheduler.client.report [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Deleted allocations for instance 566c207c-5506-4410-98ab-aee9fdbc5d6e [ 839.429917] env[62522]: DEBUG oslo_vmware.api [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 839.429917] env[62522]: value = "task-2415535" [ 839.429917] env[62522]: _type = "Task" [ 839.429917] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.432028] env[62522]: DEBUG oslo_vmware.api [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Waiting for the task: (returnval){ [ 839.432028] env[62522]: value = "task-2415534" [ 839.432028] env[62522]: _type = "Task" [ 839.432028] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.446651] env[62522]: DEBUG oslo_vmware.api [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415535, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.446651] env[62522]: DEBUG oslo_vmware.api [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415534, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.629026] env[62522]: DEBUG oslo_vmware.api [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415527, 'name': PowerOffVM_Task, 'duration_secs': 1.202945} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.629026] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 839.629279] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 839.629531] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5eea5823-6ba5-4098-9048-6440f5f6117e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.636440] env[62522]: DEBUG oslo_vmware.api [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415530, 'name': PowerOffVM_Task, 'duration_secs': 0.685335} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.636440] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 839.636440] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 839.637063] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bb50b572-b9a6-4561-9c32-3db6a9a123d2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.672605] env[62522]: DEBUG nova.compute.manager [req-0f2299e7-943f-47a3-be39-43a8d8302c8b req-f1f9457b-9528-47ef-ad97-3a3f41a791a1 service nova] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Received event network-vif-deleted-fd45a2e0-42d5-4bd8-89d5-73200646889d {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 839.672839] env[62522]: INFO nova.compute.manager [req-0f2299e7-943f-47a3-be39-43a8d8302c8b req-f1f9457b-9528-47ef-ad97-3a3f41a791a1 service nova] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Neutron deleted interface fd45a2e0-42d5-4bd8-89d5-73200646889d; detaching it from the instance and deleting it from the info cache [ 839.673021] env[62522]: DEBUG nova.network.neutron [req-0f2299e7-943f-47a3-be39-43a8d8302c8b req-f1f9457b-9528-47ef-ad97-3a3f41a791a1 service nova] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.713343] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None 
req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 839.713840] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 839.714623] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Deleting the datastore file [datastore2] 5ed51dce-2a56-4389-acf8-280bd93ff5f0 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 839.715132] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c87e712-67a2-45e2-a0c7-e23cf56747bd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.723923] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 839.724288] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 839.724495] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Deleting the datastore file [datastore2] 74e663b1-b552-4b71-aa74-308e908d79e7 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 839.724796] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ed7cc699-231b-4b42-a3f1-546ff98680c8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.729311] env[62522]: DEBUG oslo_vmware.api [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 839.729311] env[62522]: value = "task-2415538" [ 839.729311] env[62522]: _type = "Task" [ 839.729311] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.734457] env[62522]: DEBUG oslo_vmware.api [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for the task: (returnval){ [ 839.734457] env[62522]: value = "task-2415539" [ 839.734457] env[62522]: _type = "Task" [ 839.734457] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.743347] env[62522]: DEBUG oslo_vmware.api [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415538, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.748757] env[62522]: DEBUG oslo_vmware.api [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415539, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.786781] env[62522]: DEBUG oslo_concurrency.lockutils [None req-699ba112-b182-4e51-9c74-24a590c46323 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Lock "7a086314-3e49-48e9-82c9-cead8ecb19d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.973s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.800205] env[62522]: DEBUG oslo_vmware.api [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521a4ea1-2670-414c-5627-f9c6b05c69d5, 'name': SearchDatastore_Task, 'duration_secs': 0.014471} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.800592] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.800792] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 41a980df-88a9-4f9b-b34b-905b226c0675/41a980df-88a9-4f9b-b34b-905b226c0675.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 839.801091] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09959b91-bf8b-4865-b69a-dded30f0c6a0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.807939] env[62522]: DEBUG oslo_vmware.api [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Waiting for the task: (returnval){ [ 839.807939] env[62522]: value = "task-2415540" [ 839.807939] env[62522]: _type = "Task" [ 839.807939] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.817088] env[62522]: DEBUG oslo_vmware.api [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415540, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.942329] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a6b88fd3-3570-4648-bdf8-53939506e809 tempest-ServerMetadataTestJSON-1051951805 tempest-ServerMetadataTestJSON-1051951805-project-member] Lock "566c207c-5506-4410-98ab-aee9fdbc5d6e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.537s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.953363] env[62522]: DEBUG oslo_vmware.api [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415534, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.300234} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.956527] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 839.956764] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 839.956945] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 839.957168] env[62522]: INFO nova.compute.manager [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Took 1.17 seconds to destroy the instance on the hypervisor. [ 839.957482] env[62522]: DEBUG oslo.service.loopingcall [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 839.957751] env[62522]: DEBUG oslo_vmware.api [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415535, 'name': ReconfigVM_Task, 'duration_secs': 0.44908} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.957965] env[62522]: DEBUG nova.compute.manager [-] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 839.958173] env[62522]: DEBUG nova.network.neutron [-] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 839.959827] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Reconfigured VM instance instance-00000033 to attach disk [datastore1] bf44e269-0297-473e-b6ce-04a40d0ec1b4/bf44e269-0297-473e-b6ce-04a40d0ec1b4.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 839.960529] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a6c1ef6a-4b05-44fe-920b-590c0c4274d5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.970648] env[62522]: DEBUG oslo_vmware.api [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 839.970648] env[62522]: value = "task-2415541" [ 839.970648] env[62522]: _type = "Task" [ 839.970648] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.980173] env[62522]: DEBUG oslo_vmware.api [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415541, 'name': Rename_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.062232] env[62522]: DEBUG nova.network.neutron [-] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.183442] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-abe5e1f9-5f1b-4334-bd93-71163dd6aeec {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.194684] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6ba305-fe92-425a-a649-629718469789 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.222320] env[62522]: DEBUG nova.network.neutron [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Successfully updated port: d0636bb2-edb5-41e8-a81d-092c355e770c {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 840.226214] env[62522]: DEBUG nova.compute.manager [req-91e7d1e9-624a-4456-aa63-11f258c70b46 req-93fce77e-6cd4-479b-9f77-3f627b20edc5 service nova] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Received event network-vif-plugged-d0636bb2-edb5-41e8-a81d-092c355e770c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 840.226214] env[62522]: DEBUG oslo_concurrency.lockutils [req-91e7d1e9-624a-4456-aa63-11f258c70b46 req-93fce77e-6cd4-479b-9f77-3f627b20edc5 service nova] Acquiring lock "ed7220fa-fee9-4715-acbb-236682c6729e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.226214] env[62522]: DEBUG oslo_concurrency.lockutils [req-91e7d1e9-624a-4456-aa63-11f258c70b46 req-93fce77e-6cd4-479b-9f77-3f627b20edc5 service nova] Lock "ed7220fa-fee9-4715-acbb-236682c6729e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.226598] env[62522]: DEBUG oslo_concurrency.lockutils [req-91e7d1e9-624a-4456-aa63-11f258c70b46 req-93fce77e-6cd4-479b-9f77-3f627b20edc5 service nova] Lock "ed7220fa-fee9-4715-acbb-236682c6729e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.226754] env[62522]: DEBUG nova.compute.manager [req-91e7d1e9-624a-4456-aa63-11f258c70b46 req-93fce77e-6cd4-479b-9f77-3f627b20edc5 service nova] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] No waiting events found dispatching network-vif-plugged-d0636bb2-edb5-41e8-a81d-092c355e770c {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 840.226883] env[62522]: WARNING nova.compute.manager [req-91e7d1e9-624a-4456-aa63-11f258c70b46 req-93fce77e-6cd4-479b-9f77-3f627b20edc5 service nova] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Received unexpected event network-vif-plugged-d0636bb2-edb5-41e8-a81d-092c355e770c for instance with vm_state building and task_state spawning. 
[ 840.255964] env[62522]: DEBUG nova.compute.manager [req-0f2299e7-943f-47a3-be39-43a8d8302c8b req-f1f9457b-9528-47ef-ad97-3a3f41a791a1 service nova] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Detach interface failed, port_id=fd45a2e0-42d5-4bd8-89d5-73200646889d, reason: Instance 95e4fe36-6830-4fc4-bb53-1e5643c2f95b could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 840.274555] env[62522]: DEBUG oslo_vmware.api [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415538, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.269977} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.274555] env[62522]: DEBUG oslo_vmware.api [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Task: {'id': task-2415539, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.265009} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.277189] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 840.277396] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 840.277603] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 840.277807] env[62522]: INFO nova.compute.manager [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Took 2.22 seconds to destroy the instance on the hypervisor. [ 840.278091] env[62522]: DEBUG oslo.service.loopingcall [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 840.278358] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 840.278462] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 840.278629] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 840.278794] env[62522]: INFO nova.compute.manager [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Took 1.69 seconds to destroy the instance on the hypervisor. [ 840.279024] env[62522]: DEBUG oslo.service.loopingcall [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 840.279457] env[62522]: DEBUG nova.compute.manager [-] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 840.279586] env[62522]: DEBUG nova.network.neutron [-] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 840.281266] env[62522]: DEBUG nova.compute.manager [-] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 840.281367] env[62522]: DEBUG nova.network.neutron [-] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 840.292410] env[62522]: DEBUG nova.compute.manager [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 840.319091] env[62522]: DEBUG oslo_vmware.api [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415540, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.489703] env[62522]: DEBUG oslo_vmware.api [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415541, 'name': Rename_Task, 'duration_secs': 0.197239} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.491241] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 840.492997] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d8a1610b-32b2-4ed0-8d14-37cf88e7b468 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.503110] env[62522]: DEBUG oslo_vmware.api [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 840.503110] env[62522]: value = "task-2415542" [ 840.503110] env[62522]: _type = "Task" [ 840.503110] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.524276] env[62522]: DEBUG oslo_vmware.api [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415542, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.567083] env[62522]: INFO nova.compute.manager [-] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Took 1.80 seconds to deallocate network for instance. [ 840.652661] env[62522]: DEBUG oslo_vmware.rw_handles [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52544ed7-e0fc-7852-df21-b4bb521b899c/disk-0.vmdk. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 840.653870] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b108f78-b091-4787-b3b6-50e00fbd73f3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.661686] env[62522]: DEBUG oslo_vmware.rw_handles [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52544ed7-e0fc-7852-df21-b4bb521b899c/disk-0.vmdk is in state: ready. 
{{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 840.661786] env[62522]: ERROR oslo_vmware.rw_handles [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52544ed7-e0fc-7852-df21-b4bb521b899c/disk-0.vmdk due to incomplete transfer. [ 840.662020] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c161bc8a-bd33-4821-b3d2-c2a779af68a2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.666223] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce85e1f3-4c2d-4f0e-a108-cff4b2aa4bce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.678762] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b7d54b-5f4d-407b-bd6d-fe6113b25241 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.679343] env[62522]: DEBUG oslo_vmware.rw_handles [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52544ed7-e0fc-7852-df21-b4bb521b899c/disk-0.vmdk. {{(pid=62522) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 840.679880] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Uploaded image 68ec4e19-e568-4d42-9b01-c03a649009f7 to the Glance image server {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 840.682227] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Destroying the VM {{(pid=62522) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 840.682495] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-cccbc844-bd9a-4e5f-963f-e56449c6b074 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.721074] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a4eee31-1dce-47cb-a420-171649f7d8d8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.723994] env[62522]: DEBUG oslo_vmware.api [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Waiting for the task: (returnval){ [ 840.723994] env[62522]: value = "task-2415543" [ 840.723994] env[62522]: _type = "Task" [ 840.723994] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.730372] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "refresh_cache-ed7220fa-fee9-4715-acbb-236682c6729e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.730537] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired lock "refresh_cache-ed7220fa-fee9-4715-acbb-236682c6729e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.730736] env[62522]: DEBUG nova.network.neutron [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 840.734804] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe2db82-6c5a-499d-b3a8-dffcdfc4c66d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.741304] env[62522]: DEBUG oslo_vmware.api [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415543, 'name': Destroy_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.753020] env[62522]: DEBUG nova.compute.provider_tree [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 840.815808] env[62522]: DEBUG oslo_concurrency.lockutils [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.819909] env[62522]: DEBUG oslo_vmware.api [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415540, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.808191} completed successfully. 
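The refresh_cache lines above show the acquire/acquired/released bookkeeping that surrounds every named lock in this log, including the waited/held timings. Below is a minimal standard-library sketch of that pattern; the real implementation is oslo_concurrency.lockutils, so the helper is only an illustration.

```python
# Sketch of the named-lock pattern behind the "Acquiring lock ... by ...",
# "acquired ... waited Ns" and "released ... held Ns" lines above.
# Stdlib only; not the oslo_concurrency.lockutils implementation.
import logging
import threading
import time
from collections import defaultdict
from contextlib import contextmanager

LOG = logging.getLogger(__name__)
_locks: dict[str, threading.Lock] = defaultdict(threading.Lock)


@contextmanager
def named_lock(name: str, caller: str):
    LOG.debug('Acquiring lock "%s" by "%s"', name, caller)
    start = time.monotonic()
    _locks[name].acquire()
    LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
              name, caller, time.monotonic() - start)
    held_start = time.monotonic()
    try:
        yield
    finally:
        _locks[name].release()
        LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                  name, caller, time.monotonic() - held_start)


# Usage mirroring the cache-refresh entries above:
# with named_lock("refresh_cache-<instance-uuid>", "_get_instance_nw_info"):
#     ...rebuild the instance network info cache...
```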
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.820205] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 41a980df-88a9-4f9b-b34b-905b226c0675/41a980df-88a9-4f9b-b34b-905b226c0675.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 840.820439] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 840.820770] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b3d3d71f-e32e-4848-86dc-1c69d680d710 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.830026] env[62522]: DEBUG oslo_vmware.api [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Waiting for the task: (returnval){ [ 840.830026] env[62522]: value = "task-2415544" [ 840.830026] env[62522]: _type = "Task" [ 840.830026] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.838035] env[62522]: DEBUG oslo_vmware.api [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415544, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.006857] env[62522]: DEBUG nova.network.neutron [-] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.016801] env[62522]: DEBUG oslo_vmware.api [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415542, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.078176] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.238242] env[62522]: DEBUG oslo_vmware.api [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415543, 'name': Destroy_Task} progress is 33%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.289255] env[62522]: ERROR nova.scheduler.client.report [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [req-67208f07-8d6a-41f0-8200-d8d94fe60465] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c7fa38b2-245d-4337-a012-22c1a01c0a72. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-67208f07-8d6a-41f0-8200-d8d94fe60465"}]} [ 841.297337] env[62522]: DEBUG nova.network.neutron [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 841.306351] env[62522]: DEBUG nova.scheduler.client.report [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Refreshing inventories for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 841.339306] env[62522]: DEBUG nova.scheduler.client.report [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Updating ProviderTree inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 841.339306] env[62522]: DEBUG nova.compute.provider_tree [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 841.344992] env[62522]: DEBUG oslo_vmware.api [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 
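The 409 placement.concurrent_update above is Placement's optimistic-concurrency check doing its job: every inventory PUT carries the resource provider generation, a stale generation is rejected with a conflict, and the client then refreshes inventories, aggregates and traits and retries, which is exactly the sequence the following entries show. A sketch of that retry loop under those assumptions; `get_provider_generation` and `put_inventory` are hypothetical stand-ins for the scheduler report client.

```python
# Sketch of generation-based optimistic concurrency as used by Placement:
# read the provider generation, PUT the inventory with it, and on a 409
# conflict refresh and retry.  Both callables are hypothetical stand-ins.
class ConflictError(Exception):
    """Stale resource provider generation (HTTP 409, placement.concurrent_update)."""


def set_inventory(provider_uuid: str, inventory: dict,
                  get_provider_generation, put_inventory,
                  max_attempts: int = 3) -> int:
    for _ in range(max_attempts):
        generation = get_provider_generation(provider_uuid)  # refresh before each try
        try:
            # On success Placement bumps the generation (80 -> 81 later in the log).
            return put_inventory(provider_uuid, inventory, generation)
        except ConflictError:
            continue                                         # raced; refresh and retry
    raise RuntimeError(
        f"Gave up setting inventory for {provider_uuid} after {max_attempts} conflicts")
```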
tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415544, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065769} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.353053] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 841.353775] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fa1bd16-4fa3-4f60-8431-99e2d6932bb7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.382042] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] 41a980df-88a9-4f9b-b34b-905b226c0675/41a980df-88a9-4f9b-b34b-905b226c0675.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 841.383647] env[62522]: DEBUG nova.scheduler.client.report [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Refreshing aggregate associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, aggregates: None {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 841.386129] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e1215328-5bf9-4274-890a-01d5d95c9587 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.409764] env[62522]: DEBUG oslo_vmware.api [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Waiting for the task: (returnval){ [ 841.409764] env[62522]: value = "task-2415545" [ 841.409764] env[62522]: _type = "Task" [ 841.409764] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.419070] env[62522]: DEBUG oslo_vmware.api [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415545, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.435827] env[62522]: DEBUG nova.scheduler.client.report [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Refreshing trait associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 841.496089] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Acquiring lock "7a086314-3e49-48e9-82c9-cead8ecb19d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.496089] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Lock "7a086314-3e49-48e9-82c9-cead8ecb19d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.496089] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Acquiring lock "7a086314-3e49-48e9-82c9-cead8ecb19d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.496089] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Lock "7a086314-3e49-48e9-82c9-cead8ecb19d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.496332] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Lock "7a086314-3e49-48e9-82c9-cead8ecb19d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.500969] env[62522]: INFO nova.compute.manager [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Terminating instance [ 841.513923] env[62522]: INFO nova.compute.manager [-] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Took 1.55 seconds to deallocate network for instance. 
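The terminate sequence above takes a lock named after the instance UUID for the whole operation and, nested inside it, a short-lived "<uuid>-events" lock just long enough to clear pending external events for that instance (the events lock is held 0.000s). A self-contained sketch of that nesting; the module-level locks and the in-memory event store are illustrative stand-ins.

```python
# Sketch of the nested locking above: hold the per-instance lock for the whole
# terminate, and the "<uuid>-events" lock only while clearing pending events.
# The locks and the event store here are illustrative stand-ins.
import threading

_instance_lock = threading.Lock()     # "<uuid>" ... do_terminate_instance
_events_lock = threading.Lock()       # "<uuid>-events" ... _clear_events
_pending_events: dict[str, list[str]] = {}


def terminate_instance(instance_uuid: str) -> None:
    with _instance_lock:
        with _events_lock:                       # held only for the pop below
            _pending_events.pop(instance_uuid, None)
        # ...power off, unregister and delete the instance (see later entries)...
```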
[ 841.530197] env[62522]: DEBUG oslo_vmware.api [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415542, 'name': PowerOnVM_Task, 'duration_secs': 0.570728} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.530197] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 841.530197] env[62522]: INFO nova.compute.manager [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Took 9.17 seconds to spawn the instance on the hypervisor. [ 841.530393] env[62522]: DEBUG nova.compute.manager [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 841.531163] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73dedc19-42a5-4d2b-b571-782bdf7faad8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.545749] env[62522]: DEBUG nova.network.neutron [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Updating instance_info_cache with network_info: [{"id": "d0636bb2-edb5-41e8-a81d-092c355e770c", "address": "fa:16:3e:54:6b:e4", "network": {"id": "2037da36-cd13-4cb1-95f4-08d1e174336c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1895994075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00b27498c07344d1bf9cecefa0fca033", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0636bb2-ed", "ovs_interfaceid": "d0636bb2-edb5-41e8-a81d-092c355e770c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.547230] env[62522]: DEBUG nova.network.neutron [-] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.560987] env[62522]: DEBUG 
nova.network.neutron [-] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.742716] env[62522]: DEBUG oslo_vmware.api [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415543, 'name': Destroy_Task, 'duration_secs': 0.620623} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.747666] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Destroyed the VM [ 841.748016] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Deleting Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 841.748825] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6ee6443f-dd55-4deb-ae9a-8c1eefeed568 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.756595] env[62522]: DEBUG oslo_vmware.api [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Waiting for the task: (returnval){ [ 841.756595] env[62522]: value = "task-2415546" [ 841.756595] env[62522]: _type = "Task" [ 841.756595] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.771455] env[62522]: DEBUG oslo_vmware.api [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415546, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.779954] env[62522]: DEBUG nova.compute.manager [req-4c7453b8-d42e-4995-876d-6a3ad3e92856 req-63c46a16-3332-492c-b427-66d19d8dc1b8 service nova] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Received event network-vif-deleted-1e118d2e-4933-4fb5-8582-23601144447f {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 841.921045] env[62522]: DEBUG oslo_vmware.api [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415545, 'name': ReconfigVM_Task, 'duration_secs': 0.267976} completed successfully. 
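The ImagesOneServer thread above is finishing a snapshot-to-image flow: the cloned VM that backed the upload is destroyed (Destroy_Task), and only then is the snapshot removed from the original instance (RemoveSnapshot_Task). A hedged sketch of that cleanup ordering; every callable below is a hypothetical stand-in for the corresponding vSphere task.

```python
# Sketch of the snapshot-upload cleanup ordering visible above: upload the
# stream-optimized image, destroy the temporary VM backing the export, then
# delete the snapshot from the source instance.  All callables are stand-ins.
def finish_snapshot_upload(upload_image, destroy_export_vm, remove_snapshot) -> None:
    upload_image()        # "Uploaded image ... to the Glance image server"
    destroy_export_vm()   # "Destroying the VM" / Destroy_Task
    remove_snapshot()     # "Deleting Snapshot of the VM instance" / RemoveSnapshot_Task
```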
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.921459] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Reconfigured VM instance instance-00000034 to attach disk [datastore1] 41a980df-88a9-4f9b-b34b-905b226c0675/41a980df-88a9-4f9b-b34b-905b226c0675.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 841.923090] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c0f2988b-1639-4195-a558-c26d4f9a65ca {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.937325] env[62522]: DEBUG oslo_vmware.api [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Waiting for the task: (returnval){ [ 841.937325] env[62522]: value = "task-2415547" [ 841.937325] env[62522]: _type = "Task" [ 841.937325] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.946674] env[62522]: DEBUG oslo_vmware.api [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415547, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.005057] env[62522]: DEBUG nova.compute.manager [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 842.005374] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 842.006609] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d2d9ab-45a7-4eea-bc98-1fe4a9382700 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.018720] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 842.019042] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a5f1f676-510d-4f5d-9414-51413492bbad {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.025562] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.027806] env[62522]: DEBUG oslo_vmware.api [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Waiting for the task: (returnval){ [ 842.027806] env[62522]: value = "task-2415548" [ 842.027806] env[62522]: _type = "Task" [ 842.027806] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.037348] env[62522]: DEBUG oslo_vmware.api [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Task: {'id': task-2415548, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.054576] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Releasing lock "refresh_cache-ed7220fa-fee9-4715-acbb-236682c6729e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.054916] env[62522]: DEBUG nova.compute.manager [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Instance network_info: |[{"id": "d0636bb2-edb5-41e8-a81d-092c355e770c", "address": "fa:16:3e:54:6b:e4", "network": {"id": "2037da36-cd13-4cb1-95f4-08d1e174336c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1895994075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00b27498c07344d1bf9cecefa0fca033", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0636bb2-ed", "ovs_interfaceid": "d0636bb2-edb5-41e8-a81d-092c355e770c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 842.059468] env[62522]: INFO nova.compute.manager [-] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Took 1.78 seconds to deallocate network for instance. [ 842.060078] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:54:6b:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f65996a3-f865-4492-9377-cd14ec8b3aae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd0636bb2-edb5-41e8-a81d-092c355e770c', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 842.069024] env[62522]: DEBUG oslo.service.loopingcall [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 842.069024] env[62522]: INFO nova.compute.manager [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Took 55.06 seconds to build instance. 
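The two large JSON blobs above are the same Neutron network_info seen twice: once as the instance cache entry and once condensed into the VIF info (network_ref of type OpaqueNetwork, iface_id, vif_model) that the VMware spawn path needs. A small sketch of that translation for NSX opaque networks; the field names mirror the log, but the helper itself is illustrative rather than the Nova implementation.

```python
# Sketch of condensing a Neutron network_info entry (first JSON blob above)
# into the VIF info dict used to build the VM (second blob above).  Only the
# fields visible in the log are handled; the helper is illustrative.
def vif_info_from_network_info(vif: dict, vif_model: str = "vmxnet3") -> dict:
    details = vif.get("details", {})
    return {
        "network_name": vif["network"]["bridge"],            # e.g. "br-int"
        "mac_address": vif["address"],                        # e.g. "fa:16:3e:54:6b:e4"
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],                                # Neutron port UUID
        "vif_model": vif_model,
    }
```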
[ 842.071291] env[62522]: INFO nova.compute.manager [-] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Took 1.79 seconds to deallocate network for instance. [ 842.071545] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 842.078687] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0de5aecf-a04b-4e06-b6ad-403367615fec {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.106901] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 842.106901] env[62522]: value = "task-2415549" [ 842.106901] env[62522]: _type = "Task" [ 842.106901] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.121432] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415549, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.173758] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-266381e4-a61a-4210-9f44-0f174c1b6b7e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.183358] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae77d70-9c87-47d1-bddc-be70fd947dd5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.217765] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13821ebf-7dfa-4692-8802-04e4237852ea {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.226302] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5974a58b-da3e-4fdc-b79a-6d670e189fe6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.239917] env[62522]: DEBUG nova.compute.provider_tree [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 842.267247] env[62522]: DEBUG oslo_vmware.api [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415546, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.404926] env[62522]: DEBUG nova.compute.manager [req-72ca3782-ae1d-4966-a05d-07b2e250fabd req-abbd3627-2020-42f0-9e89-bb541313c897 service nova] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Received event network-changed-d0636bb2-edb5-41e8-a81d-092c355e770c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 842.405332] env[62522]: DEBUG nova.compute.manager [req-72ca3782-ae1d-4966-a05d-07b2e250fabd req-abbd3627-2020-42f0-9e89-bb541313c897 service nova] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Refreshing instance network info cache due to event network-changed-d0636bb2-edb5-41e8-a81d-092c355e770c. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 842.405627] env[62522]: DEBUG oslo_concurrency.lockutils [req-72ca3782-ae1d-4966-a05d-07b2e250fabd req-abbd3627-2020-42f0-9e89-bb541313c897 service nova] Acquiring lock "refresh_cache-ed7220fa-fee9-4715-acbb-236682c6729e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.405890] env[62522]: DEBUG oslo_concurrency.lockutils [req-72ca3782-ae1d-4966-a05d-07b2e250fabd req-abbd3627-2020-42f0-9e89-bb541313c897 service nova] Acquired lock "refresh_cache-ed7220fa-fee9-4715-acbb-236682c6729e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.406214] env[62522]: DEBUG nova.network.neutron [req-72ca3782-ae1d-4966-a05d-07b2e250fabd req-abbd3627-2020-42f0-9e89-bb541313c897 service nova] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Refreshing network info cache for port d0636bb2-edb5-41e8-a81d-092c355e770c {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 842.447073] env[62522]: DEBUG oslo_vmware.api [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415547, 'name': Rename_Task, 'duration_secs': 0.160169} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.447418] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 842.447711] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-41bdeb94-6058-4599-9a11-99d9333e8d1f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.453805] env[62522]: DEBUG oslo_vmware.api [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Waiting for the task: (returnval){ [ 842.453805] env[62522]: value = "task-2415550" [ 842.453805] env[62522]: _type = "Task" [ 842.453805] env[62522]: } to complete. 
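The service-nova entries above are Neutron's network-changed external event arriving at the compute manager, which responds by re-acquiring the refresh_cache lock for that instance and refreshing the cached info for the one port named in the event. A brief sketch of that handler; the lock and the refresh callable are illustrative stand-ins.

```python
# Sketch of handling the "network-changed-<port-id>" external event above:
# serialize on the instance's refresh_cache lock, then refresh just that port
# in the cached network info.  Lock and refresh callable are stand-ins.
import threading

_refresh_cache_lock = threading.Lock()


def handle_network_changed(instance_uuid: str, port_id: str, refresh_port_cache) -> None:
    with _refresh_cache_lock:                        # "refresh_cache-<instance-uuid>"
        refresh_port_cache(instance_uuid, port_id)   # rebuilds the cached VIF entry
```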
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.461398] env[62522]: DEBUG oslo_vmware.api [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415550, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.536539] env[62522]: DEBUG oslo_vmware.api [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Task: {'id': task-2415548, 'name': PowerOffVM_Task, 'duration_secs': 0.240101} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.536801] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 842.536961] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 842.537226] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1a713222-4783-4c6c-b1c0-7ab0d729f3b8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.573098] env[62522]: DEBUG oslo_concurrency.lockutils [None req-39defb2c-ab6a-4876-b4d5-c77607409dda tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "bf44e269-0297-473e-b6ce-04a40d0ec1b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.625s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.598487] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.598822] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 842.599084] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Deleting contents of the VM from datastore datastore1 {{(pid=62522) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 842.599266] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Deleting the datastore file [datastore1] 7a086314-3e49-48e9-82c9-cead8ecb19d1 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 842.599565] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c5a00ec9-41b7-4a7e-906b-adccb52cd3c1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.602818] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.607310] env[62522]: DEBUG oslo_vmware.api [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Waiting for the task: (returnval){ [ 842.607310] env[62522]: value = "task-2415552" [ 842.607310] env[62522]: _type = "Task" [ 842.607310] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.619790] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415549, 'name': CreateVM_Task, 'duration_secs': 0.347222} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.623724] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 842.624120] env[62522]: DEBUG oslo_vmware.api [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Task: {'id': task-2415552, 'name': DeleteDatastoreFile_Task} progress is 0%. 
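Taken together, the ServersNegativeTestMultiTenant entries in the last few blocks walk through the hypervisor-side destroy in order: power the VM off, unregister it from vCenter, then delete its directory from the datastore. A hedged sketch of that ordering; each callable is a stand-in for the corresponding vSphere task seen in the log.

```python
# Sketch of the destroy ordering visible above: PowerOffVM_Task, UnregisterVM,
# then DeleteDatastoreFile_Task on the instance directory.  The callables are
# hypothetical stand-ins for the corresponding vSphere operations.
import logging

LOG = logging.getLogger(__name__)


def destroy_instance_on_hypervisor(instance_uuid: str, datastore_path: str,
                                   power_off_vm, unregister_vm,
                                   delete_datastore_dir) -> None:
    power_off_vm(instance_uuid)              # "Powering off the VM"
    unregister_vm(instance_uuid)             # "Unregistering the VM"
    LOG.debug("Deleting the datastore file %s", datastore_path)
    delete_datastore_dir(datastore_path)     # "Deleting contents of the VM from datastore"
    LOG.debug("Instance destroyed")
```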
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.624980] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.625264] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.625721] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 842.626046] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0a45924-96c8-40b1-af10-a0d08fa8b62f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.631175] env[62522]: DEBUG oslo_vmware.api [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 842.631175] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521fead0-2afb-c3c8-def8-34d8b90f0f22" [ 842.631175] env[62522]: _type = "Task" [ 842.631175] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.639406] env[62522]: DEBUG oslo_vmware.api [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521fead0-2afb-c3c8-def8-34d8b90f0f22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.765484] env[62522]: DEBUG oslo_vmware.api [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415546, 'name': RemoveSnapshot_Task, 'duration_secs': 0.936622} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.765807] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Deleted Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 842.766060] env[62522]: INFO nova.compute.manager [None req-c1b66acf-901f-4cc9-bc97-d03b2369466e tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Took 15.67 seconds to snapshot the instance on the hypervisor. [ 842.777199] env[62522]: DEBUG nova.scheduler.client.report [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Updated inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with generation 80 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 842.777199] env[62522]: DEBUG nova.compute.provider_tree [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Updating resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 generation from 80 to 81 during operation: update_inventory {{(pid=62522) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 842.777364] env[62522]: DEBUG nova.compute.provider_tree [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 842.963887] env[62522]: DEBUG oslo_vmware.api [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415550, 'name': PowerOnVM_Task, 'duration_secs': 0.455469} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.966228] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 842.966444] env[62522]: INFO nova.compute.manager [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Took 7.77 seconds to spawn the instance on the hypervisor. [ 842.966627] env[62522]: DEBUG nova.compute.manager [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 842.967446] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd2e2639-f929-42a6-9546-fb1dcd1d8533 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.076352] env[62522]: DEBUG nova.compute.manager [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 843.122677] env[62522]: DEBUG oslo_vmware.api [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Task: {'id': task-2415552, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135965} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.123798] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 843.124008] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 843.124201] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 843.124380] env[62522]: INFO nova.compute.manager [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Took 1.12 seconds to destroy the instance on the hypervisor. [ 843.124657] env[62522]: DEBUG oslo.service.loopingcall [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 843.125685] env[62522]: DEBUG nova.compute.manager [-] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 843.125796] env[62522]: DEBUG nova.network.neutron [-] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 843.142341] env[62522]: DEBUG oslo_vmware.api [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521fead0-2afb-c3c8-def8-34d8b90f0f22, 'name': SearchDatastore_Task, 'duration_secs': 0.008678} completed successfully. 
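After the hypervisor-side destroy above, the compute manager deallocates the instance's Neutron resources inside a looping call ("Waiting for function ... _deallocate_network_with_retries to return"), i.e. the deallocation is retried on failure instead of failing the teardown outright. Below is a minimal standard-library sketch of such a retry loop; the interval, attempt count and the deallocate callable are assumptions, not Nova's actual values.

```python
# Minimal sketch of a retrying deallocation like the looping call above.
# Interval and attempt count are illustrative assumptions.
import logging
import time

LOG = logging.getLogger(__name__)


def deallocate_network_with_retries(deallocate, instance_uuid: str,
                                    attempts: int = 3, interval: float = 2.0) -> None:
    for attempt in range(1, attempts + 1):
        try:
            deallocate(instance_uuid)        # deallocate_for_instance()
            LOG.info("Deallocated network for %s on attempt %d", instance_uuid, attempt)
            return
        except Exception:
            LOG.exception("Failed to deallocate network for %s (attempt %d/%d)",
                          instance_uuid, attempt, attempts)
            if attempt < attempts:
                time.sleep(interval)
    raise RuntimeError(f"Could not deallocate network for {instance_uuid}")
```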
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.142625] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.142853] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 843.143102] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.143253] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.143431] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 843.143684] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d82c268d-3b2a-4d85-877f-e6c7b42b7081 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.151743] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 843.151902] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Folder [datastore1] devstack-image-cache_base created. 
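The ServersTestJSON thread above is running the image-cache check for spawn: lock on the cached VMDK path, make sure the devstack-image-cache_base folder exists, and search the datastore for the cached disk so the download from Glance can be skipped when it is already present. A hedged sketch of that fetch-if-missing decision; the search and fetch callables are illustrative stand-ins, not the Nova vmops code.

```python
# Sketch of the fetch-if-missing flow above: ensure the image-cache folder
# exists, search the datastore for the cached VMDK, and only download from
# Glance when the cache misses.  All callables are illustrative stand-ins.
import logging

LOG = logging.getLogger(__name__)


def fetch_image_if_missing(image_id: str, cache_dir: str,
                           ensure_dir, datastore_has, fetch_from_glance) -> str:
    cached_vmdk = f"{cache_dir}/{image_id}/{image_id}.vmdk"
    LOG.debug("Processing image %s", image_id)
    ensure_dir(cache_dir)                        # "Creating directory with path ..."
    if not datastore_has(cached_vmdk):           # SearchDatastore_Task
        fetch_from_glance(image_id, cached_vmdk)
    return cached_vmdk
```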
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 843.152627] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a508635b-a30f-452a-b134-ab9e5d4105db {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.157385] env[62522]: DEBUG oslo_vmware.api [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 843.157385] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fe7147-0cf6-ea31-4669-80cd0103d5d6" [ 843.157385] env[62522]: _type = "Task" [ 843.157385] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.165297] env[62522]: DEBUG oslo_vmware.api [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fe7147-0cf6-ea31-4669-80cd0103d5d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.283765] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.890s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.286763] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.366s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.287140] env[62522]: DEBUG nova.objects.instance [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Lazy-loading 'resources' on Instance uuid 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 843.313319] env[62522]: INFO nova.scheduler.client.report [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Deleted allocations for instance d6935c9b-e4cc-47ed-96d5-e485d60382d6 [ 843.488201] env[62522]: INFO nova.compute.manager [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Took 48.42 seconds to build instance. [ 843.542287] env[62522]: DEBUG nova.network.neutron [req-72ca3782-ae1d-4966-a05d-07b2e250fabd req-abbd3627-2020-42f0-9e89-bb541313c897 service nova] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Updated VIF entry in instance network info cache for port d0636bb2-edb5-41e8-a81d-092c355e770c. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 843.542726] env[62522]: DEBUG nova.network.neutron [req-72ca3782-ae1d-4966-a05d-07b2e250fabd req-abbd3627-2020-42f0-9e89-bb541313c897 service nova] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Updating instance_info_cache with network_info: [{"id": "d0636bb2-edb5-41e8-a81d-092c355e770c", "address": "fa:16:3e:54:6b:e4", "network": {"id": "2037da36-cd13-4cb1-95f4-08d1e174336c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1895994075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00b27498c07344d1bf9cecefa0fca033", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0636bb2-ed", "ovs_interfaceid": "d0636bb2-edb5-41e8-a81d-092c355e770c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.603851] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.670295] env[62522]: DEBUG oslo_vmware.api [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fe7147-0cf6-ea31-4669-80cd0103d5d6, 'name': SearchDatastore_Task, 'duration_secs': 0.00838} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.671081] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a71ce3d2-752a-4fe5-bf4d-2074f55abfd9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.680201] env[62522]: DEBUG oslo_vmware.api [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 843.680201] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c139b5-fe19-c851-d67b-6e9055b42fac" [ 843.680201] env[62522]: _type = "Task" [ 843.680201] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.689438] env[62522]: DEBUG oslo_vmware.api [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c139b5-fe19-c851-d67b-6e9055b42fac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.826132] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e5317c45-0bec-440d-b451-aeb0fdbafbdf tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "d6935c9b-e4cc-47ed-96d5-e485d60382d6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.355s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.956953] env[62522]: DEBUG oslo_concurrency.lockutils [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquiring lock "ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.957222] env[62522]: DEBUG oslo_concurrency.lockutils [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Lock "ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.990149] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f1cc7ba9-a287-47fa-ba13-4ebafe00e47d tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Lock "41a980df-88a9-4f9b-b34b-905b226c0675" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.849s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.046703] env[62522]: DEBUG oslo_concurrency.lockutils [req-72ca3782-ae1d-4966-a05d-07b2e250fabd req-abbd3627-2020-42f0-9e89-bb541313c897 service nova] Releasing lock "refresh_cache-ed7220fa-fee9-4715-acbb-236682c6729e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.046703] env[62522]: DEBUG nova.compute.manager [req-72ca3782-ae1d-4966-a05d-07b2e250fabd req-abbd3627-2020-42f0-9e89-bb541313c897 service nova] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Received event network-vif-deleted-71039daa-ce8b-462d-b9f3-8e07f9ec2666 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 844.046703] env[62522]: DEBUG nova.compute.manager [req-72ca3782-ae1d-4966-a05d-07b2e250fabd req-abbd3627-2020-42f0-9e89-bb541313c897 service nova] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Received event network-vif-deleted-680f73c8-8196-4790-84fe-eb56b69413df {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 844.191939] env[62522]: DEBUG oslo_vmware.api [None 
req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c139b5-fe19-c851-d67b-6e9055b42fac, 'name': SearchDatastore_Task, 'duration_secs': 0.010265} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.192190] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.192686] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] ed7220fa-fee9-4715-acbb-236682c6729e/ed7220fa-fee9-4715-acbb-236682c6729e.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 844.192761] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-67f45f97-5964-425b-b2bb-2e63264994cf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.198880] env[62522]: DEBUG oslo_vmware.api [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 844.198880] env[62522]: value = "task-2415553" [ 844.198880] env[62522]: _type = "Task" [ 844.198880] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.210089] env[62522]: DEBUG oslo_vmware.api [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415553, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.291511] env[62522]: DEBUG nova.network.neutron [-] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.377265] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cd59cd3-b452-469b-9c69-d41edf5ea005 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.385540] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1b1ec6-9159-415a-b42c-c2ba38cae30a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.419763] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d0f38f0-4d61-4f32-9737-93b0661534f5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.429109] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd4dd33-6158-4565-a827-e05e51453d67 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.444222] env[62522]: DEBUG nova.compute.provider_tree [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 844.494108] env[62522]: DEBUG nova.compute.manager [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 844.642842] env[62522]: DEBUG oslo_concurrency.lockutils [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquiring lock "72e054d2-79bb-4ef8-82d1-4e67ba0ef20a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.643120] env[62522]: DEBUG oslo_concurrency.lockutils [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Lock "72e054d2-79bb-4ef8-82d1-4e67ba0ef20a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.714573] env[62522]: DEBUG oslo_vmware.api [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415553, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491497} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.714573] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] ed7220fa-fee9-4715-acbb-236682c6729e/ed7220fa-fee9-4715-acbb-236682c6729e.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 844.714573] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 844.714573] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e3494988-bd7b-4cb9-be46-13f1afc6a1d7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.719929] env[62522]: DEBUG oslo_vmware.api [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 844.719929] env[62522]: value = "task-2415554" [ 844.719929] env[62522]: _type = "Task" [ 844.719929] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.726061] env[62522]: DEBUG oslo_vmware.api [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415554, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.796366] env[62522]: INFO nova.compute.manager [-] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Took 1.67 seconds to deallocate network for instance. 
[ 844.951023] env[62522]: DEBUG nova.scheduler.client.report [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 845.029623] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.125504] env[62522]: DEBUG nova.compute.manager [req-0f1a5b11-760f-4b44-8bdb-df2e43f02f27 req-e9bf19ba-f1f6-4787-9ec9-bc6accc96408 service nova] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Received event network-vif-deleted-0436d465-681f-4cf5-b0e7-496837ecf964 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 845.125715] env[62522]: DEBUG nova.compute.manager [req-0f1a5b11-760f-4b44-8bdb-df2e43f02f27 req-e9bf19ba-f1f6-4787-9ec9-bc6accc96408 service nova] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Received event network-changed-36fe2fd3-3447-4032-8c02-5be9712b769d {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 845.125883] env[62522]: DEBUG nova.compute.manager [req-0f1a5b11-760f-4b44-8bdb-df2e43f02f27 req-e9bf19ba-f1f6-4787-9ec9-bc6accc96408 service nova] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Refreshing instance network info cache due to event network-changed-36fe2fd3-3447-4032-8c02-5be9712b769d. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 845.126114] env[62522]: DEBUG oslo_concurrency.lockutils [req-0f1a5b11-760f-4b44-8bdb-df2e43f02f27 req-e9bf19ba-f1f6-4787-9ec9-bc6accc96408 service nova] Acquiring lock "refresh_cache-bf44e269-0297-473e-b6ce-04a40d0ec1b4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.126297] env[62522]: DEBUG oslo_concurrency.lockutils [req-0f1a5b11-760f-4b44-8bdb-df2e43f02f27 req-e9bf19ba-f1f6-4787-9ec9-bc6accc96408 service nova] Acquired lock "refresh_cache-bf44e269-0297-473e-b6ce-04a40d0ec1b4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.126410] env[62522]: DEBUG nova.network.neutron [req-0f1a5b11-760f-4b44-8bdb-df2e43f02f27 req-e9bf19ba-f1f6-4787-9ec9-bc6accc96408 service nova] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Refreshing network info cache for port 36fe2fd3-3447-4032-8c02-5be9712b769d {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 845.229236] env[62522]: DEBUG oslo_vmware.api [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415554, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.283174} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.229600] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 845.230484] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc0cdb16-de82-4efd-a1c4-c3e7c8ec8d20 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.255087] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] ed7220fa-fee9-4715-acbb-236682c6729e/ed7220fa-fee9-4715-acbb-236682c6729e.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 845.255087] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd8dd1cb-0288-44f7-9383-95c3e6be8fa7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.273921] env[62522]: DEBUG oslo_vmware.api [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 845.273921] env[62522]: value = "task-2415555" [ 845.273921] env[62522]: _type = "Task" [ 845.273921] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.282114] env[62522]: DEBUG oslo_vmware.api [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415555, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.303213] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.407185] env[62522]: DEBUG nova.compute.manager [req-18b53724-2310-45d2-b792-ef0e6a8b4ac3 req-a0aa02bb-cd8f-4728-a19d-73c30f25b71d service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Received event network-changed-e44d8202-0840-41f3-a86d-8baffc8c19dd {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 845.407185] env[62522]: DEBUG nova.compute.manager [req-18b53724-2310-45d2-b792-ef0e6a8b4ac3 req-a0aa02bb-cd8f-4728-a19d-73c30f25b71d service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Refreshing instance network info cache due to event network-changed-e44d8202-0840-41f3-a86d-8baffc8c19dd. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 845.407185] env[62522]: DEBUG oslo_concurrency.lockutils [req-18b53724-2310-45d2-b792-ef0e6a8b4ac3 req-a0aa02bb-cd8f-4728-a19d-73c30f25b71d service nova] Acquiring lock "refresh_cache-c1fd078c-61d4-4c0f-8c49-0f56a926a087" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.407185] env[62522]: DEBUG oslo_concurrency.lockutils [req-18b53724-2310-45d2-b792-ef0e6a8b4ac3 req-a0aa02bb-cd8f-4728-a19d-73c30f25b71d service nova] Acquired lock "refresh_cache-c1fd078c-61d4-4c0f-8c49-0f56a926a087" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.407185] env[62522]: DEBUG nova.network.neutron [req-18b53724-2310-45d2-b792-ef0e6a8b4ac3 req-a0aa02bb-cd8f-4728-a19d-73c30f25b71d service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Refreshing network info cache for port e44d8202-0840-41f3-a86d-8baffc8c19dd {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 845.457664] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.171s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.460151] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 43.361s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.460396] env[62522]: DEBUG 
nova.objects.instance [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lazy-loading 'resources' on Instance uuid 713dd924-1c96-496a-bd06-cf0235dd6f75 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 845.479967] env[62522]: INFO nova.scheduler.client.report [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Deleted allocations for instance 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a [ 845.784490] env[62522]: DEBUG oslo_vmware.api [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415555, 'name': ReconfigVM_Task, 'duration_secs': 0.290836} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.784872] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Reconfigured VM instance instance-00000035 to attach disk [datastore1] ed7220fa-fee9-4715-acbb-236682c6729e/ed7220fa-fee9-4715-acbb-236682c6729e.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 845.785576] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ee03a713-53c9-4551-98d5-cf2f933ee42a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.792009] env[62522]: DEBUG oslo_vmware.api [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 845.792009] env[62522]: value = "task-2415556" [ 845.792009] env[62522]: _type = "Task" [ 845.792009] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.800224] env[62522]: DEBUG oslo_vmware.api [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415556, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.869738] env[62522]: DEBUG nova.network.neutron [req-0f1a5b11-760f-4b44-8bdb-df2e43f02f27 req-e9bf19ba-f1f6-4787-9ec9-bc6accc96408 service nova] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Updated VIF entry in instance network info cache for port 36fe2fd3-3447-4032-8c02-5be9712b769d. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 845.870257] env[62522]: DEBUG nova.network.neutron [req-0f1a5b11-760f-4b44-8bdb-df2e43f02f27 req-e9bf19ba-f1f6-4787-9ec9-bc6accc96408 service nova] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Updating instance_info_cache with network_info: [{"id": "36fe2fd3-3447-4032-8c02-5be9712b769d", "address": "fa:16:3e:2e:5d:25", "network": {"id": "5f1d73d1-ff9e-4081-87cf-8df6294f67c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-892212702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "962664c996f24cf9ae192f79fae18ca4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "419a5b3f-4c6f-4168-9def-746b4d8c5c24", "external-id": "nsx-vlan-transportzone-656", "segmentation_id": 656, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36fe2fd3-34", "ovs_interfaceid": "36fe2fd3-3447-4032-8c02-5be9712b769d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.990512] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a708a769-4218-47b5-b42f-1a957641c5b4 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Lock "7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.186s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.154969] env[62522]: DEBUG nova.network.neutron [req-18b53724-2310-45d2-b792-ef0e6a8b4ac3 req-a0aa02bb-cd8f-4728-a19d-73c30f25b71d service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Updated VIF entry in instance network info cache for port e44d8202-0840-41f3-a86d-8baffc8c19dd. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 846.155345] env[62522]: DEBUG nova.network.neutron [req-18b53724-2310-45d2-b792-ef0e6a8b4ac3 req-a0aa02bb-cd8f-4728-a19d-73c30f25b71d service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Updating instance_info_cache with network_info: [{"id": "e44d8202-0840-41f3-a86d-8baffc8c19dd", "address": "fa:16:3e:bc:f2:43", "network": {"id": "896c53ad-3b58-4e2c-89d9-7fa723dc8e79", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-558196866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "04ba6295b89743a184cc64343ac6bbaf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape44d8202-08", "ovs_interfaceid": "e44d8202-0840-41f3-a86d-8baffc8c19dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.302297] env[62522]: DEBUG oslo_vmware.api [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415556, 'name': Rename_Task, 'duration_secs': 0.136001} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.302576] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 846.302833] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8f3dd6df-44e5-49e5-a527-14d852a584e3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.313468] env[62522]: DEBUG oslo_vmware.api [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 846.313468] env[62522]: value = "task-2415557" [ 846.313468] env[62522]: _type = "Task" [ 846.313468] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.324216] env[62522]: DEBUG oslo_vmware.api [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415557, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.374096] env[62522]: DEBUG oslo_concurrency.lockutils [req-0f1a5b11-760f-4b44-8bdb-df2e43f02f27 req-e9bf19ba-f1f6-4787-9ec9-bc6accc96408 service nova] Releasing lock "refresh_cache-bf44e269-0297-473e-b6ce-04a40d0ec1b4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.481198] env[62522]: DEBUG oslo_concurrency.lockutils [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "0d36b844-554e-46e7-9cf9-ef04b67e8898" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.481586] env[62522]: DEBUG oslo_concurrency.lockutils [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "0d36b844-554e-46e7-9cf9-ef04b67e8898" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.481801] env[62522]: DEBUG oslo_concurrency.lockutils [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "0d36b844-554e-46e7-9cf9-ef04b67e8898-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.482064] env[62522]: DEBUG oslo_concurrency.lockutils [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "0d36b844-554e-46e7-9cf9-ef04b67e8898-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.482304] env[62522]: DEBUG oslo_concurrency.lockutils [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "0d36b844-554e-46e7-9cf9-ef04b67e8898-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.485687] env[62522]: INFO nova.compute.manager [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Terminating instance [ 846.529959] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aabc1e3-1167-4479-a22f-124abf26d446 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.538822] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05eb9baa-600d-4229-840b-5768e6cebd87 {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.575768] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adeba9ab-b0de-4371-8ab2-2f05c537b443 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.583657] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a89deb-2846-4e19-9006-ec88d588ff8c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.597286] env[62522]: DEBUG nova.compute.provider_tree [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.658561] env[62522]: DEBUG oslo_concurrency.lockutils [req-18b53724-2310-45d2-b792-ef0e6a8b4ac3 req-a0aa02bb-cd8f-4728-a19d-73c30f25b71d service nova] Releasing lock "refresh_cache-c1fd078c-61d4-4c0f-8c49-0f56a926a087" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.824090] env[62522]: DEBUG oslo_vmware.api [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415557, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.910131] env[62522]: DEBUG nova.compute.manager [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 846.911048] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6f78e47-ee5f-42b1-ba8c-81bdac096935 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.990472] env[62522]: DEBUG nova.compute.manager [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 846.990823] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 846.992488] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc628de5-866e-4dc0-bf55-e889113793a7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.999848] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 847.000107] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-57222f62-c77f-49b2-9679-616e9c3c881e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.006166] env[62522]: DEBUG oslo_vmware.api [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 847.006166] env[62522]: value = "task-2415558" [ 847.006166] env[62522]: _type = "Task" [ 847.006166] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.014535] env[62522]: DEBUG oslo_vmware.api [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415558, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.103408] env[62522]: DEBUG nova.scheduler.client.report [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 847.326782] env[62522]: DEBUG oslo_vmware.api [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415557, 'name': PowerOnVM_Task, 'duration_secs': 0.698999} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.327075] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 847.327285] env[62522]: INFO nova.compute.manager [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Took 8.25 seconds to spawn the instance on the hypervisor. [ 847.327463] env[62522]: DEBUG nova.compute.manager [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 847.328268] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8a73ca2-b31f-4e57-8a7e-e5585da105b1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.423309] env[62522]: INFO nova.compute.manager [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] instance snapshotting [ 847.425651] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae6874e-2ea7-4793-83c5-8300e16e45ed {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.452325] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2017b7f3-9a43-41c8-82e7-928ff47a21aa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.515614] env[62522]: DEBUG oslo_vmware.api [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415558, 'name': PowerOffVM_Task, 'duration_secs': 0.481667} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.515893] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 847.516097] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 847.516355] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-92a2ab39-555b-4c18-8773-dead292ff1fc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.593977] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 847.594237] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 847.594423] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Deleting the datastore file [datastore1] 0d36b844-554e-46e7-9cf9-ef04b67e8898 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 847.594744] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d2c78513-f2c4-42c0-8a16-7cd0662f93d4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.602491] env[62522]: DEBUG oslo_vmware.api [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 847.602491] env[62522]: value = "task-2415560" [ 847.602491] env[62522]: _type = "Task" [ 847.602491] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.608363] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.148s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.616848] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.876s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.617041] env[62522]: DEBUG nova.objects.instance [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Lazy-loading 'resources' on Instance uuid ae3e55b8-00c1-4dae-9276-f46a1e17b80e {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 847.618377] env[62522]: DEBUG oslo_vmware.api [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415560, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.620902] env[62522]: DEBUG nova.compute.manager [req-c4e5f594-1c66-4e34-b91e-8a3a9f85fa6e req-1d2f919f-6d00-49c6-8a1b-6507e14dd66f service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Received event network-changed-e44d8202-0840-41f3-a86d-8baffc8c19dd {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 847.621102] env[62522]: DEBUG nova.compute.manager [req-c4e5f594-1c66-4e34-b91e-8a3a9f85fa6e req-1d2f919f-6d00-49c6-8a1b-6507e14dd66f service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Refreshing instance network info cache due to event network-changed-e44d8202-0840-41f3-a86d-8baffc8c19dd. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 847.621317] env[62522]: DEBUG oslo_concurrency.lockutils [req-c4e5f594-1c66-4e34-b91e-8a3a9f85fa6e req-1d2f919f-6d00-49c6-8a1b-6507e14dd66f service nova] Acquiring lock "refresh_cache-c1fd078c-61d4-4c0f-8c49-0f56a926a087" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.621467] env[62522]: DEBUG oslo_concurrency.lockutils [req-c4e5f594-1c66-4e34-b91e-8a3a9f85fa6e req-1d2f919f-6d00-49c6-8a1b-6507e14dd66f service nova] Acquired lock "refresh_cache-c1fd078c-61d4-4c0f-8c49-0f56a926a087" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.621620] env[62522]: DEBUG nova.network.neutron [req-c4e5f594-1c66-4e34-b91e-8a3a9f85fa6e req-1d2f919f-6d00-49c6-8a1b-6507e14dd66f service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Refreshing network info cache for port e44d8202-0840-41f3-a86d-8baffc8c19dd {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 847.636668] env[62522]: INFO nova.scheduler.client.report [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Deleted allocations for instance 713dd924-1c96-496a-bd06-cf0235dd6f75 [ 847.846912] env[62522]: INFO nova.compute.manager [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Took 49.36 seconds to build instance. [ 847.963535] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Creating Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 847.963878] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-660b2838-cda4-4840-a80a-a2fea9e63094 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.971902] env[62522]: DEBUG oslo_vmware.api [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Waiting for the task: (returnval){ [ 847.971902] env[62522]: value = "task-2415561" [ 847.971902] env[62522]: _type = "Task" [ 847.971902] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.980453] env[62522]: DEBUG oslo_vmware.api [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415561, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.112870] env[62522]: DEBUG oslo_vmware.api [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415560, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198169} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.113165] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 848.113357] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 848.113539] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 848.113745] env[62522]: INFO nova.compute.manager [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Took 1.12 seconds to destroy the instance on the hypervisor. [ 848.114039] env[62522]: DEBUG oslo.service.loopingcall [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 848.114266] env[62522]: DEBUG nova.compute.manager [-] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 848.114372] env[62522]: DEBUG nova.network.neutron [-] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 848.145858] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b3d51795-8efe-4588-a400-b2f31514635c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "713dd924-1c96-496a-bd06-cf0235dd6f75" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.534s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.352116] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5c766b8f-0b38-4fe8-a4bd-e8778d670b14 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "ed7220fa-fee9-4715-acbb-236682c6729e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.617s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.488115] env[62522]: DEBUG oslo_vmware.api [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415561, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.530743] env[62522]: DEBUG nova.network.neutron [req-c4e5f594-1c66-4e34-b91e-8a3a9f85fa6e req-1d2f919f-6d00-49c6-8a1b-6507e14dd66f service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Updated VIF entry in instance network info cache for port e44d8202-0840-41f3-a86d-8baffc8c19dd. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 848.534025] env[62522]: DEBUG nova.network.neutron [req-c4e5f594-1c66-4e34-b91e-8a3a9f85fa6e req-1d2f919f-6d00-49c6-8a1b-6507e14dd66f service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Updating instance_info_cache with network_info: [{"id": "e44d8202-0840-41f3-a86d-8baffc8c19dd", "address": "fa:16:3e:bc:f2:43", "network": {"id": "896c53ad-3b58-4e2c-89d9-7fa723dc8e79", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-558196866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "04ba6295b89743a184cc64343ac6bbaf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape44d8202-08", "ovs_interfaceid": "e44d8202-0840-41f3-a86d-8baffc8c19dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.816196] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c694ed4-8264-4468-9c11-ecc0e9f7dfb4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.825313] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e08ce437-b913-4bd6-89cb-040230aca6a0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.860448] env[62522]: DEBUG nova.compute.manager [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 848.863807] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-316a19ed-b8b5-4642-984e-4e5c539ee691 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.873658] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2711b4c-4c49-401c-bbff-660f2db03e9a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.887566] env[62522]: DEBUG nova.compute.provider_tree [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 848.940375] env[62522]: DEBUG nova.compute.manager [req-8b8ac729-07fc-44f5-8934-7e8abb6ffaf7 req-9c906942-9d5c-4bd5-8a05-a4f77bc221e2 service nova] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Received event network-vif-deleted-fa2a1b8f-4097-4665-a83e-74536b00779c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 848.940594] env[62522]: INFO nova.compute.manager [req-8b8ac729-07fc-44f5-8934-7e8abb6ffaf7 req-9c906942-9d5c-4bd5-8a05-a4f77bc221e2 service nova] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Neutron deleted interface fa2a1b8f-4097-4665-a83e-74536b00779c; detaching it from the instance and deleting it from the info cache [ 848.941434] env[62522]: DEBUG nova.network.neutron [req-8b8ac729-07fc-44f5-8934-7e8abb6ffaf7 req-9c906942-9d5c-4bd5-8a05-a4f77bc221e2 service nova] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.984508] env[62522]: DEBUG oslo_vmware.api [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415561, 'name': CreateSnapshot_Task, 'duration_secs': 0.606973} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.984508] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Created Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 848.985101] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-880a00f1-2f36-4e40-8f37-3c55563269cd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.030480] env[62522]: DEBUG nova.network.neutron [-] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.033572] env[62522]: DEBUG oslo_concurrency.lockutils [req-c4e5f594-1c66-4e34-b91e-8a3a9f85fa6e req-1d2f919f-6d00-49c6-8a1b-6507e14dd66f service nova] Releasing lock "refresh_cache-c1fd078c-61d4-4c0f-8c49-0f56a926a087" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.033821] env[62522]: DEBUG nova.compute.manager [req-c4e5f594-1c66-4e34-b91e-8a3a9f85fa6e req-1d2f919f-6d00-49c6-8a1b-6507e14dd66f service nova] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Received event network-changed-36b110ee-cabf-4e98-b183-605196991aec {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 849.033991] env[62522]: DEBUG nova.compute.manager [req-c4e5f594-1c66-4e34-b91e-8a3a9f85fa6e req-1d2f919f-6d00-49c6-8a1b-6507e14dd66f service nova] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Refreshing instance network info cache due to event network-changed-36b110ee-cabf-4e98-b183-605196991aec. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 849.034226] env[62522]: DEBUG oslo_concurrency.lockutils [req-c4e5f594-1c66-4e34-b91e-8a3a9f85fa6e req-1d2f919f-6d00-49c6-8a1b-6507e14dd66f service nova] Acquiring lock "refresh_cache-41a980df-88a9-4f9b-b34b-905b226c0675" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.034372] env[62522]: DEBUG oslo_concurrency.lockutils [req-c4e5f594-1c66-4e34-b91e-8a3a9f85fa6e req-1d2f919f-6d00-49c6-8a1b-6507e14dd66f service nova] Acquired lock "refresh_cache-41a980df-88a9-4f9b-b34b-905b226c0675" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.034533] env[62522]: DEBUG nova.network.neutron [req-c4e5f594-1c66-4e34-b91e-8a3a9f85fa6e req-1d2f919f-6d00-49c6-8a1b-6507e14dd66f service nova] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Refreshing network info cache for port 36b110ee-cabf-4e98-b183-605196991aec {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 849.388780] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.391726] env[62522]: DEBUG nova.scheduler.client.report [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 849.443904] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1d4ae908-5fff-4933-9f71-a923a68f1ffb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.458123] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40503d9f-3551-489a-87ca-d4309690d197 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.503476] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Creating linked-clone VM from snapshot {{(pid=62522) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 849.503872] env[62522]: DEBUG nova.compute.manager [req-8b8ac729-07fc-44f5-8934-7e8abb6ffaf7 req-9c906942-9d5c-4bd5-8a05-a4f77bc221e2 service nova] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Detach interface failed, port_id=fa2a1b8f-4097-4665-a83e-74536b00779c, reason: Instance 0d36b844-554e-46e7-9cf9-ef04b67e8898 could not be found. 
{{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 849.504479] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquiring lock "41a980df-88a9-4f9b-b34b-905b226c0675" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.504771] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Lock "41a980df-88a9-4f9b-b34b-905b226c0675" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.504911] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquiring lock "41a980df-88a9-4f9b-b34b-905b226c0675-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.505105] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Lock "41a980df-88a9-4f9b-b34b-905b226c0675-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.505275] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Lock "41a980df-88a9-4f9b-b34b-905b226c0675-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.506871] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5537e917-5349-47c4-aa08-543d74031595 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.510085] env[62522]: INFO nova.compute.manager [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Terminating instance [ 849.517177] env[62522]: DEBUG oslo_vmware.api [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Waiting for the task: (returnval){ [ 849.517177] env[62522]: value = "task-2415562" [ 849.517177] env[62522]: _type = "Task" [ 849.517177] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.527752] env[62522]: DEBUG oslo_vmware.api [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415562, 'name': CloneVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.532699] env[62522]: INFO nova.compute.manager [-] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Took 1.42 seconds to deallocate network for instance. [ 849.783881] env[62522]: DEBUG nova.network.neutron [req-c4e5f594-1c66-4e34-b91e-8a3a9f85fa6e req-1d2f919f-6d00-49c6-8a1b-6507e14dd66f service nova] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Updated VIF entry in instance network info cache for port 36b110ee-cabf-4e98-b183-605196991aec. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 849.784423] env[62522]: DEBUG nova.network.neutron [req-c4e5f594-1c66-4e34-b91e-8a3a9f85fa6e req-1d2f919f-6d00-49c6-8a1b-6507e14dd66f service nova] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Updating instance_info_cache with network_info: [{"id": "36b110ee-cabf-4e98-b183-605196991aec", "address": "fa:16:3e:3e:f0:91", "network": {"id": "896c53ad-3b58-4e2c-89d9-7fa723dc8e79", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-558196866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "04ba6295b89743a184cc64343ac6bbaf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36b110ee-ca", "ovs_interfaceid": "36b110ee-cabf-4e98-b183-605196991aec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.897827] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.281s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.905681] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.096s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.905681] env[62522]: DEBUG nova.objects.instance [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 
tempest-ImagesTestJSON-182949557-project-member] Lazy-loading 'resources' on Instance uuid ee1c638b-1f38-4e21-9369-4d4ff2e13d46 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 849.929311] env[62522]: INFO nova.scheduler.client.report [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Deleted allocations for instance ae3e55b8-00c1-4dae-9276-f46a1e17b80e [ 850.014133] env[62522]: DEBUG nova.compute.manager [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 850.014399] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 850.015317] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f6a8bde-bbc7-465f-86a6-8a92753055da {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.028260] env[62522]: DEBUG oslo_vmware.api [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415562, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.030434] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 850.030759] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-305ee9f5-1925-4621-87a0-a3ebc1d9b092 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.036754] env[62522]: DEBUG oslo_vmware.api [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Waiting for the task: (returnval){ [ 850.036754] env[62522]: value = "task-2415563" [ 850.036754] env[62522]: _type = "Task" [ 850.036754] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.040777] env[62522]: DEBUG oslo_concurrency.lockutils [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.046650] env[62522]: DEBUG oslo_concurrency.lockutils [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "ed7220fa-fee9-4715-acbb-236682c6729e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.047097] env[62522]: DEBUG oslo_concurrency.lockutils [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "ed7220fa-fee9-4715-acbb-236682c6729e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.047421] env[62522]: DEBUG oslo_concurrency.lockutils [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "ed7220fa-fee9-4715-acbb-236682c6729e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.047700] env[62522]: DEBUG oslo_concurrency.lockutils [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "ed7220fa-fee9-4715-acbb-236682c6729e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.047897] env[62522]: DEBUG oslo_concurrency.lockutils [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "ed7220fa-fee9-4715-acbb-236682c6729e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.049828] env[62522]: DEBUG oslo_vmware.api [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415563, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.050428] env[62522]: INFO nova.compute.manager [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Terminating instance [ 850.254843] env[62522]: DEBUG nova.compute.manager [req-8ebcb37d-ea07-4eb7-a77e-365368ec9d79 req-3614c06f-ba9b-46a0-9483-f6df1098c723 service nova] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Received event network-changed-36b110ee-cabf-4e98-b183-605196991aec {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 850.255457] env[62522]: DEBUG nova.compute.manager [req-8ebcb37d-ea07-4eb7-a77e-365368ec9d79 req-3614c06f-ba9b-46a0-9483-f6df1098c723 service nova] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Refreshing instance network info cache due to event network-changed-36b110ee-cabf-4e98-b183-605196991aec. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 850.255781] env[62522]: DEBUG oslo_concurrency.lockutils [req-8ebcb37d-ea07-4eb7-a77e-365368ec9d79 req-3614c06f-ba9b-46a0-9483-f6df1098c723 service nova] Acquiring lock "refresh_cache-41a980df-88a9-4f9b-b34b-905b226c0675" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.287144] env[62522]: DEBUG oslo_concurrency.lockutils [req-c4e5f594-1c66-4e34-b91e-8a3a9f85fa6e req-1d2f919f-6d00-49c6-8a1b-6507e14dd66f service nova] Releasing lock "refresh_cache-41a980df-88a9-4f9b-b34b-905b226c0675" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 850.287613] env[62522]: DEBUG oslo_concurrency.lockutils [req-8ebcb37d-ea07-4eb7-a77e-365368ec9d79 req-3614c06f-ba9b-46a0-9483-f6df1098c723 service nova] Acquired lock "refresh_cache-41a980df-88a9-4f9b-b34b-905b226c0675" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.287808] env[62522]: DEBUG nova.network.neutron [req-8ebcb37d-ea07-4eb7-a77e-365368ec9d79 req-3614c06f-ba9b-46a0-9483-f6df1098c723 service nova] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Refreshing network info cache for port 36b110ee-cabf-4e98-b183-605196991aec {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 850.440229] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8e6215c8-bc53-4ec1-95e0-5182688e1dbb tempest-ServersV294TestFqdnHostnames-734188344 tempest-ServersV294TestFqdnHostnames-734188344-project-member] Lock "ae3e55b8-00c1-4dae-9276-f46a1e17b80e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.912s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.537691] env[62522]: DEBUG oslo_vmware.api [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415562, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.547677] env[62522]: DEBUG oslo_vmware.api [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415563, 'name': PowerOffVM_Task, 'duration_secs': 0.169989} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.547990] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 850.548212] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 850.548577] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e586dfc8-5eb8-4289-ae1e-05623f6ca273 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.554210] env[62522]: DEBUG nova.compute.manager [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 850.554418] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 850.555216] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ffcb611-4848-4eaa-8264-7acc79d613e5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.565180] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 850.565354] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ed603735-726f-443c-b1e1-8b8061fe507e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.572789] env[62522]: DEBUG oslo_vmware.api [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 850.572789] env[62522]: value = "task-2415565" [ 850.572789] env[62522]: _type = "Task" [ 850.572789] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.584900] env[62522]: DEBUG oslo_vmware.api [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415565, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.612121] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 850.613031] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 850.613031] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Deleting the datastore file [datastore1] 41a980df-88a9-4f9b-b34b-905b226c0675 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 850.613031] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-996e5c4d-405c-4d25-ae63-b18c4810d7f5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.619638] env[62522]: DEBUG oslo_vmware.api [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Waiting for the task: (returnval){ [ 850.619638] env[62522]: value = "task-2415566" [ 850.619638] env[62522]: _type = "Task" [ 850.619638] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.632577] env[62522]: DEBUG oslo_vmware.api [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415566, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.998715] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e733515e-6de7-43e8-bd16-8d4f031cdfc4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.006556] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c9707e2-f2b6-4950-b140-0cbb24fcda03 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.037040] env[62522]: DEBUG nova.network.neutron [req-8ebcb37d-ea07-4eb7-a77e-365368ec9d79 req-3614c06f-ba9b-46a0-9483-f6df1098c723 service nova] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Updated VIF entry in instance network info cache for port 36b110ee-cabf-4e98-b183-605196991aec. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 851.037418] env[62522]: DEBUG nova.network.neutron [req-8ebcb37d-ea07-4eb7-a77e-365368ec9d79 req-3614c06f-ba9b-46a0-9483-f6df1098c723 service nova] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Updating instance_info_cache with network_info: [{"id": "36b110ee-cabf-4e98-b183-605196991aec", "address": "fa:16:3e:3e:f0:91", "network": {"id": "896c53ad-3b58-4e2c-89d9-7fa723dc8e79", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-558196866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "04ba6295b89743a184cc64343ac6bbaf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36b110ee-ca", "ovs_interfaceid": "36b110ee-cabf-4e98-b183-605196991aec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.042791] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e2edcef-02d1-426c-8633-0f79b0d68cc2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.057102] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d55a66b-27cf-4432-9498-05082f624c35 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.058647] env[62522]: DEBUG oslo_vmware.api [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415562, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.069858] env[62522]: DEBUG nova.compute.provider_tree [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 851.082818] env[62522]: DEBUG oslo_vmware.api [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415565, 'name': PowerOffVM_Task, 'duration_secs': 0.204405} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.083112] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 851.083324] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 851.083585] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5721b4ee-8839-4301-8df4-629497f4e585 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.131986] env[62522]: DEBUG oslo_vmware.api [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415566, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170417} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.132215] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 851.132397] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 851.132565] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 851.132740] env[62522]: INFO nova.compute.manager [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Took 1.12 seconds to destroy the instance on the hypervisor. [ 851.132985] env[62522]: DEBUG oslo.service.loopingcall [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 851.133200] env[62522]: DEBUG nova.compute.manager [-] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 851.133301] env[62522]: DEBUG nova.network.neutron [-] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 851.142962] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 851.143197] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 851.143380] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Deleting the datastore file [datastore1] ed7220fa-fee9-4715-acbb-236682c6729e {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 851.143637] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-623b7d8d-f038-4d65-a414-b726488baa2d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.150522] env[62522]: DEBUG oslo_vmware.api [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 851.150522] env[62522]: value = "task-2415568" [ 851.150522] env[62522]: _type = "Task" [ 851.150522] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.158108] env[62522]: DEBUG oslo_vmware.api [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415568, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.548402] env[62522]: DEBUG oslo_concurrency.lockutils [req-8ebcb37d-ea07-4eb7-a77e-365368ec9d79 req-3614c06f-ba9b-46a0-9483-f6df1098c723 service nova] Releasing lock "refresh_cache-41a980df-88a9-4f9b-b34b-905b226c0675" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.548842] env[62522]: DEBUG oslo_vmware.api [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415562, 'name': CloneVM_Task, 'duration_secs': 1.549255} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.549124] env[62522]: INFO nova.virt.vmwareapi.vmops [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Created linked-clone VM from snapshot [ 851.549906] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abdb6091-aa7a-4704-927a-e3ca700a6a30 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.559069] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Uploading image 32419fa8-e764-4db9-9852-c10270321907 {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 851.573795] env[62522]: DEBUG nova.scheduler.client.report [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 851.585110] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 851.585110] env[62522]: value = "vm-489716" [ 851.585110] env[62522]: _type = "VirtualMachine" [ 851.585110] env[62522]: }. 
{{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 851.585378] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a41c8bce-e56e-4ff6-bfec-65350eba3cc8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.592340] env[62522]: DEBUG nova.compute.manager [req-9460c7cb-4917-4737-bbe6-c0865dababa4 req-84689a7a-9fb7-4316-9741-db718cb73134 service nova] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Received event network-vif-deleted-36b110ee-cabf-4e98-b183-605196991aec {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 851.592566] env[62522]: INFO nova.compute.manager [req-9460c7cb-4917-4737-bbe6-c0865dababa4 req-84689a7a-9fb7-4316-9741-db718cb73134 service nova] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Neutron deleted interface 36b110ee-cabf-4e98-b183-605196991aec; detaching it from the instance and deleting it from the info cache [ 851.592701] env[62522]: DEBUG nova.network.neutron [req-9460c7cb-4917-4737-bbe6-c0865dababa4 req-84689a7a-9fb7-4316-9741-db718cb73134 service nova] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.600771] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Lease: (returnval){ [ 851.600771] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5286d6b0-c3dc-bbc5-0e39-f5458f85284b" [ 851.600771] env[62522]: _type = "HttpNfcLease" [ 851.600771] env[62522]: } obtained for exporting VM: (result){ [ 851.600771] env[62522]: value = "vm-489716" [ 851.600771] env[62522]: _type = "VirtualMachine" [ 851.600771] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 851.600771] env[62522]: DEBUG oslo_vmware.api [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Waiting for the lease: (returnval){ [ 851.600771] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5286d6b0-c3dc-bbc5-0e39-f5458f85284b" [ 851.600771] env[62522]: _type = "HttpNfcLease" [ 851.600771] env[62522]: } to be ready. {{(pid=62522) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 851.607819] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 851.607819] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5286d6b0-c3dc-bbc5-0e39-f5458f85284b" [ 851.607819] env[62522]: _type = "HttpNfcLease" [ 851.607819] env[62522]: } is ready. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 851.608541] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 851.608541] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5286d6b0-c3dc-bbc5-0e39-f5458f85284b" [ 851.608541] env[62522]: _type = "HttpNfcLease" [ 851.608541] env[62522]: }. 
{{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 851.609464] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d49ef4fe-baa9-4b8d-b597-c547db2b0a3d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.617525] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5273b17f-94e1-7382-3868-f2ba088611b0/disk-0.vmdk from lease info. {{(pid=62522) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 851.617697] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5273b17f-94e1-7382-3868-f2ba088611b0/disk-0.vmdk for reading. {{(pid=62522) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 851.687370] env[62522]: DEBUG oslo_vmware.api [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415568, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135278} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.687621] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 851.687804] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 851.687981] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 851.688168] env[62522]: INFO nova.compute.manager [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Took 1.13 seconds to destroy the instance on the hypervisor. [ 851.688411] env[62522]: DEBUG oslo.service.loopingcall [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 851.688637] env[62522]: DEBUG nova.compute.manager [-] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 851.688706] env[62522]: DEBUG nova.network.neutron [-] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 851.737240] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8a840042-0407-443a-8a0c-7254bbd812f6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.923320] env[62522]: DEBUG nova.network.neutron [-] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.081579] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.178s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.083995] env[62522]: DEBUG oslo_concurrency.lockutils [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.823s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.084759] env[62522]: DEBUG nova.objects.instance [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lazy-loading 'resources' on Instance uuid d68b472d-2139-4e2d-bb28-7e45d80904cb {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 852.101761] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-56078ba0-1726-4b09-a865-172c0c1fbc00 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.117034] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a2f019d-ad8c-4763-a263-ef1a5a7268a7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.133035] env[62522]: INFO nova.scheduler.client.report [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Deleted allocations for instance ee1c638b-1f38-4e21-9369-4d4ff2e13d46 [ 852.165022] env[62522]: DEBUG nova.compute.manager [req-9460c7cb-4917-4737-bbe6-c0865dababa4 req-84689a7a-9fb7-4316-9741-db718cb73134 service nova] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Detach interface failed, port_id=36b110ee-cabf-4e98-b183-605196991aec, reason: Instance 41a980df-88a9-4f9b-b34b-905b226c0675 could not be found. 
{{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 852.430980] env[62522]: INFO nova.compute.manager [-] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Took 1.30 seconds to deallocate network for instance. [ 852.596559] env[62522]: DEBUG nova.compute.manager [req-28d46ada-7620-48be-a53c-ee54e2f88bca req-34c786b4-c21b-4fe1-8524-059c173ef046 service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Received event network-changed-e44d8202-0840-41f3-a86d-8baffc8c19dd {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 852.596898] env[62522]: DEBUG nova.compute.manager [req-28d46ada-7620-48be-a53c-ee54e2f88bca req-34c786b4-c21b-4fe1-8524-059c173ef046 service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Refreshing instance network info cache due to event network-changed-e44d8202-0840-41f3-a86d-8baffc8c19dd. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 852.597309] env[62522]: DEBUG oslo_concurrency.lockutils [req-28d46ada-7620-48be-a53c-ee54e2f88bca req-34c786b4-c21b-4fe1-8524-059c173ef046 service nova] Acquiring lock "refresh_cache-c1fd078c-61d4-4c0f-8c49-0f56a926a087" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.597744] env[62522]: DEBUG oslo_concurrency.lockutils [req-28d46ada-7620-48be-a53c-ee54e2f88bca req-34c786b4-c21b-4fe1-8524-059c173ef046 service nova] Acquired lock "refresh_cache-c1fd078c-61d4-4c0f-8c49-0f56a926a087" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.598140] env[62522]: DEBUG nova.network.neutron [req-28d46ada-7620-48be-a53c-ee54e2f88bca req-34c786b4-c21b-4fe1-8524-059c173ef046 service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Refreshing network info cache for port e44d8202-0840-41f3-a86d-8baffc8c19dd {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 852.647236] env[62522]: DEBUG nova.network.neutron [-] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.650028] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dc97c2af-35dd-4f29-8f54-6cc055e70d38 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "ee1c638b-1f38-4e21-9369-4d4ff2e13d46" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.723s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.940197] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.139915] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35ae21a3-2d49-4ec1-ba16-7b7f28a54e3b tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquiring lock "41a980df-88a9-4f9b-b34b-905b226c0675" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.152027] env[62522]: INFO nova.compute.manager [-] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Took 1.46 seconds to deallocate network for instance. [ 853.291700] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92bf1aa4-38df-4cff-a626-b52db5a56731 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.300997] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25be8c82-068d-4fb0-8b5f-3cdc41084086 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.339972] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ca8c0fa-07c6-464e-ae5b-cffbe6d7ef5c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.352335] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-193f05b4-135e-40c7-9e54-cbc036ccbf66 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.385491] env[62522]: DEBUG nova.compute.provider_tree [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 853.409212] env[62522]: DEBUG nova.network.neutron [req-28d46ada-7620-48be-a53c-ee54e2f88bca req-34c786b4-c21b-4fe1-8524-059c173ef046 service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Updated VIF entry in instance network info cache for port e44d8202-0840-41f3-a86d-8baffc8c19dd. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 853.409212] env[62522]: DEBUG nova.network.neutron [req-28d46ada-7620-48be-a53c-ee54e2f88bca req-34c786b4-c21b-4fe1-8524-059c173ef046 service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Updating instance_info_cache with network_info: [{"id": "e44d8202-0840-41f3-a86d-8baffc8c19dd", "address": "fa:16:3e:bc:f2:43", "network": {"id": "896c53ad-3b58-4e2c-89d9-7fa723dc8e79", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-558196866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "04ba6295b89743a184cc64343ac6bbaf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape44d8202-08", "ovs_interfaceid": "e44d8202-0840-41f3-a86d-8baffc8c19dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.663486] env[62522]: DEBUG oslo_concurrency.lockutils [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.680065] env[62522]: DEBUG nova.compute.manager [req-b81de2b0-3dfe-459e-b9e9-3b2550603592 req-33b171ce-2e0d-4151-9b0e-928072b075c3 service nova] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Received event network-vif-deleted-d0636bb2-edb5-41e8-a81d-092c355e770c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 853.888146] env[62522]: DEBUG nova.scheduler.client.report [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 853.912495] env[62522]: DEBUG oslo_concurrency.lockutils [req-28d46ada-7620-48be-a53c-ee54e2f88bca req-34c786b4-c21b-4fe1-8524-059c173ef046 service nova] Releasing lock "refresh_cache-c1fd078c-61d4-4c0f-8c49-0f56a926a087" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.915089] env[62522]: DEBUG nova.compute.manager 
[req-28d46ada-7620-48be-a53c-ee54e2f88bca req-34c786b4-c21b-4fe1-8524-059c173ef046 service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Received event network-changed-e44d8202-0840-41f3-a86d-8baffc8c19dd {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 853.915089] env[62522]: DEBUG nova.compute.manager [req-28d46ada-7620-48be-a53c-ee54e2f88bca req-34c786b4-c21b-4fe1-8524-059c173ef046 service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Refreshing instance network info cache due to event network-changed-e44d8202-0840-41f3-a86d-8baffc8c19dd. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 853.915089] env[62522]: DEBUG oslo_concurrency.lockutils [req-28d46ada-7620-48be-a53c-ee54e2f88bca req-34c786b4-c21b-4fe1-8524-059c173ef046 service nova] Acquiring lock "refresh_cache-c1fd078c-61d4-4c0f-8c49-0f56a926a087" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.915089] env[62522]: DEBUG oslo_concurrency.lockutils [req-28d46ada-7620-48be-a53c-ee54e2f88bca req-34c786b4-c21b-4fe1-8524-059c173ef046 service nova] Acquired lock "refresh_cache-c1fd078c-61d4-4c0f-8c49-0f56a926a087" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.915089] env[62522]: DEBUG nova.network.neutron [req-28d46ada-7620-48be-a53c-ee54e2f88bca req-34c786b4-c21b-4fe1-8524-059c173ef046 service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Refreshing network info cache for port e44d8202-0840-41f3-a86d-8baffc8c19dd {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 854.394017] env[62522]: DEBUG oslo_concurrency.lockutils [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.310s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.397520] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.262s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.397520] env[62522]: DEBUG nova.objects.instance [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Lazy-loading 'resources' on Instance uuid d266aff3-42b4-4dcb-b8ca-7c13cdf8d314 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 854.429798] env[62522]: INFO nova.scheduler.client.report [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Deleted allocations for instance d68b472d-2139-4e2d-bb28-7e45d80904cb [ 854.658445] env[62522]: DEBUG nova.network.neutron [req-28d46ada-7620-48be-a53c-ee54e2f88bca req-34c786b4-c21b-4fe1-8524-059c173ef046 service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Updated VIF entry in instance network info cache for port e44d8202-0840-41f3-a86d-8baffc8c19dd. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 854.658811] env[62522]: DEBUG nova.network.neutron [req-28d46ada-7620-48be-a53c-ee54e2f88bca req-34c786b4-c21b-4fe1-8524-059c173ef046 service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Updating instance_info_cache with network_info: [{"id": "e44d8202-0840-41f3-a86d-8baffc8c19dd", "address": "fa:16:3e:bc:f2:43", "network": {"id": "896c53ad-3b58-4e2c-89d9-7fa723dc8e79", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-558196866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "04ba6295b89743a184cc64343ac6bbaf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape44d8202-08", "ovs_interfaceid": "e44d8202-0840-41f3-a86d-8baffc8c19dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.941600] env[62522]: DEBUG oslo_concurrency.lockutils [None req-52236c40-728a-4cb2-ba8a-3b76b3a88859 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "d68b472d-2139-4e2d-bb28-7e45d80904cb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.407s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.168020] env[62522]: DEBUG oslo_concurrency.lockutils [req-28d46ada-7620-48be-a53c-ee54e2f88bca req-34c786b4-c21b-4fe1-8524-059c173ef046 service nova] Releasing lock "refresh_cache-c1fd078c-61d4-4c0f-8c49-0f56a926a087" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.394357] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-355be5f9-d67a-45ad-9b02-3177450be977 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.404186] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f8f7ba-bd4b-4d7f-a330-30846f88b44b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.441632] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a25c3e-1e65-4598-a49c-96677768a68b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.449797] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf9a6ca3-83d1-46d0-ac4f-ee4e26ee5ae7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.467770] env[62522]: DEBUG 
nova.compute.provider_tree [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 855.971359] env[62522]: DEBUG nova.scheduler.client.report [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 856.123425] env[62522]: DEBUG oslo_concurrency.lockutils [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquiring lock "8b21b749-b872-43f7-a2c5-aefee6c5f3a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.123425] env[62522]: DEBUG oslo_concurrency.lockutils [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "8b21b749-b872-43f7-a2c5-aefee6c5f3a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.483574] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.085s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.485025] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.706s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.486703] env[62522]: INFO nova.compute.claims [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 856.531337] env[62522]: INFO nova.scheduler.client.report [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Deleted allocations for 
instance d266aff3-42b4-4dcb-b8ca-7c13cdf8d314 [ 857.044442] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d7838d0b-0f44-4a55-9965-9c5cb7cdf940 tempest-AttachInterfacesUnderV243Test-1120479289 tempest-AttachInterfacesUnderV243Test-1120479289-project-member] Lock "d266aff3-42b4-4dcb-b8ca-7c13cdf8d314" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.052s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.924856] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Acquiring lock "0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.925163] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Lock "0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.075601] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-826e047c-e05a-4b77-85d9-2de72b4f4317 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.085121] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a67d5b3b-cad7-4603-b386-9041425d56e9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.117429] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db7d6c7-e7fd-414a-80c3-880fa248b752 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.125184] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa02abb-9b1c-48c7-acb4-0003a8d7ee4f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.143526] env[62522]: DEBUG nova.compute.provider_tree [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 858.646374] env[62522]: DEBUG nova.scheduler.client.report [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 858.840409] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5273b17f-94e1-7382-3868-f2ba088611b0/disk-0.vmdk. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 858.841660] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3f9c0c0-2c3a-4338-915a-62242300e15a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.847855] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5273b17f-94e1-7382-3868-f2ba088611b0/disk-0.vmdk is in state: ready. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 858.848040] env[62522]: ERROR oslo_vmware.rw_handles [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5273b17f-94e1-7382-3868-f2ba088611b0/disk-0.vmdk due to incomplete transfer. [ 858.848271] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-356734e8-090c-40c8-8df4-a8f9339d48bc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.855270] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5273b17f-94e1-7382-3868-f2ba088611b0/disk-0.vmdk. 
{{(pid=62522) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 858.855471] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Uploaded image 32419fa8-e764-4db9-9852-c10270321907 to the Glance image server {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 858.858504] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Destroying the VM {{(pid=62522) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 858.858978] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e5e9900e-7051-4e1e-a40c-c789d0cc2fe7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.866512] env[62522]: DEBUG oslo_vmware.api [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Waiting for the task: (returnval){ [ 858.866512] env[62522]: value = "task-2415570" [ 858.866512] env[62522]: _type = "Task" [ 858.866512] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.874878] env[62522]: DEBUG oslo_vmware.api [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415570, 'name': Destroy_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.151850] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.667s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.152790] env[62522]: DEBUG nova.compute.manager [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 859.156465] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.526s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.158259] env[62522]: INFO nova.compute.claims [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 859.377979] env[62522]: DEBUG oslo_vmware.api [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415570, 'name': Destroy_Task, 'duration_secs': 0.318857} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.377979] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Destroyed the VM [ 859.378638] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Deleting Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 859.378638] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-65c5a147-4f13-42bb-a19f-8c5ed6459644 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.385312] env[62522]: DEBUG oslo_vmware.api [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Waiting for the task: (returnval){ [ 859.385312] env[62522]: value = "task-2415571" [ 859.385312] env[62522]: _type = "Task" [ 859.385312] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.395627] env[62522]: DEBUG oslo_vmware.api [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415571, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.663936] env[62522]: DEBUG nova.compute.utils [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 859.669299] env[62522]: DEBUG nova.compute.manager [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 859.669593] env[62522]: DEBUG nova.network.neutron [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 859.741409] env[62522]: DEBUG nova.policy [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6aeef0de62c648a0a7d9d17caef4bc3b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '62105741677343cd9177bf00786ae761', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 859.895701] env[62522]: DEBUG oslo_vmware.api [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415571, 'name': RemoveSnapshot_Task, 'duration_secs': 0.49702} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.895913] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Deleted Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 859.896150] env[62522]: INFO nova.compute.manager [None req-3db7ca15-b711-43cd-94a7-2fe941557310 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Took 12.47 seconds to snapshot the instance on the hypervisor. 
[ 860.083647] env[62522]: DEBUG nova.network.neutron [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Successfully created port: 32b31ef7-1045-4a88-9dba-5d5e5040efc2 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 860.172994] env[62522]: DEBUG nova.compute.manager [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 860.653027] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-334e263e-b1aa-49d6-ba2f-f607df7b9998 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.662469] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57a2355b-6034-4398-aafc-1b09a81c41ab {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.696576] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b15739f-61c9-466b-96a3-4ba3aadc0cff {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.704298] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d2b199-5d52-4c69-8a1d-e704c0bd09f3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.718187] env[62522]: DEBUG nova.compute.provider_tree [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 861.200022] env[62522]: DEBUG nova.compute.manager [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 861.220990] env[62522]: DEBUG nova.virt.hardware [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 861.221723] env[62522]: DEBUG nova.virt.hardware [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 861.222018] env[62522]: DEBUG nova.virt.hardware [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 861.222422] env[62522]: DEBUG nova.virt.hardware [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 861.222759] env[62522]: DEBUG nova.virt.hardware [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 861.223124] env[62522]: DEBUG nova.virt.hardware [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 861.223618] env[62522]: DEBUG nova.virt.hardware [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 861.224085] env[62522]: DEBUG nova.virt.hardware [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 861.224420] 
env[62522]: DEBUG nova.virt.hardware [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 861.224799] env[62522]: DEBUG nova.virt.hardware [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 861.225267] env[62522]: DEBUG nova.virt.hardware [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 861.226657] env[62522]: DEBUG nova.scheduler.client.report [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 861.231144] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82268064-d957-428e-8a02-faf3df01f5f6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.240092] env[62522]: DEBUG oslo_concurrency.lockutils [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Acquiring lock "504396d8-077d-4563-91b5-a7a6259eea27" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.240386] env[62522]: DEBUG oslo_concurrency.lockutils [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Lock "504396d8-077d-4563-91b5-a7a6259eea27" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.240600] env[62522]: DEBUG oslo_concurrency.lockutils [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Acquiring lock "504396d8-077d-4563-91b5-a7a6259eea27-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.240886] env[62522]: DEBUG oslo_concurrency.lockutils [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 
tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Lock "504396d8-077d-4563-91b5-a7a6259eea27-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.241155] env[62522]: DEBUG oslo_concurrency.lockutils [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Lock "504396d8-077d-4563-91b5-a7a6259eea27-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.244379] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c24ae44-b43f-434a-b5ae-a848d303e497 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.248602] env[62522]: INFO nova.compute.manager [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Terminating instance [ 861.528708] env[62522]: DEBUG nova.compute.manager [req-4c272c83-f674-4f0b-bc1e-4674f1fdd3a5 req-4a8f0046-87ae-4b37-822b-55a079e007fc service nova] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Received event network-vif-plugged-32b31ef7-1045-4a88-9dba-5d5e5040efc2 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 861.528944] env[62522]: DEBUG oslo_concurrency.lockutils [req-4c272c83-f674-4f0b-bc1e-4674f1fdd3a5 req-4a8f0046-87ae-4b37-822b-55a079e007fc service nova] Acquiring lock "a10c4dee-4490-445a-bea2-9f8ef5425d15-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.529595] env[62522]: DEBUG oslo_concurrency.lockutils [req-4c272c83-f674-4f0b-bc1e-4674f1fdd3a5 req-4a8f0046-87ae-4b37-822b-55a079e007fc service nova] Lock "a10c4dee-4490-445a-bea2-9f8ef5425d15-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.529804] env[62522]: DEBUG oslo_concurrency.lockutils [req-4c272c83-f674-4f0b-bc1e-4674f1fdd3a5 req-4a8f0046-87ae-4b37-822b-55a079e007fc service nova] Lock "a10c4dee-4490-445a-bea2-9f8ef5425d15-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.529984] env[62522]: DEBUG nova.compute.manager [req-4c272c83-f674-4f0b-bc1e-4674f1fdd3a5 req-4a8f0046-87ae-4b37-822b-55a079e007fc service nova] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] No waiting events found dispatching network-vif-plugged-32b31ef7-1045-4a88-9dba-5d5e5040efc2 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 861.530175] env[62522]: WARNING nova.compute.manager [req-4c272c83-f674-4f0b-bc1e-4674f1fdd3a5 req-4a8f0046-87ae-4b37-822b-55a079e007fc service nova] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Received unexpected event 
network-vif-plugged-32b31ef7-1045-4a88-9dba-5d5e5040efc2 for instance with vm_state building and task_state spawning. [ 861.631427] env[62522]: DEBUG nova.network.neutron [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Successfully updated port: 32b31ef7-1045-4a88-9dba-5d5e5040efc2 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 861.736572] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.580s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.737178] env[62522]: DEBUG nova.compute.manager [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 861.740050] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.667s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.741282] env[62522]: INFO nova.compute.claims [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 861.753305] env[62522]: DEBUG nova.compute.manager [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 861.753502] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 861.754558] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-050b74b4-7c3c-401b-b1ec-cb85d0bfd55f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.763063] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 861.763063] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3b2f8425-d498-419d-8706-d36667525c0c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.769568] env[62522]: DEBUG oslo_vmware.api [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Waiting for the task: (returnval){ [ 861.769568] env[62522]: value = "task-2415572" [ 861.769568] env[62522]: _type = "Task" [ 861.769568] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.778262] env[62522]: DEBUG oslo_vmware.api [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415572, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.133981] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Acquiring lock "refresh_cache-a10c4dee-4490-445a-bea2-9f8ef5425d15" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.135236] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Acquired lock "refresh_cache-a10c4dee-4490-445a-bea2-9f8ef5425d15" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.135236] env[62522]: DEBUG nova.network.neutron [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 862.246034] env[62522]: DEBUG nova.compute.utils [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 862.247567] env[62522]: DEBUG nova.compute.manager [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 862.247673] env[62522]: DEBUG nova.network.neutron [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 862.279436] env[62522]: DEBUG oslo_vmware.api [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415572, 'name': PowerOffVM_Task, 'duration_secs': 0.458064} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.279722] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 862.279894] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 862.280148] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7b237ffc-71e4-42b4-a8a7-ee2f0e850c7f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.304214] env[62522]: DEBUG nova.policy [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '40504d2538e34ec2b02cc43b616aafbd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91dee2b9e8bd456cbb55667383b0058d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 862.342595] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 862.342949] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 862.343220] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Deleting the datastore file [datastore2] 504396d8-077d-4563-91b5-a7a6259eea27 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 862.343501] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1084a389-6b33-4e4d-9a66-c6c32f7d1878 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.349902] env[62522]: DEBUG oslo_vmware.api [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Waiting for the task: (returnval){ [ 862.349902] env[62522]: value = "task-2415574" [ 862.349902] env[62522]: _type = "Task" [ 862.349902] 
env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.360072] env[62522]: DEBUG oslo_vmware.api [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415574, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.615232] env[62522]: DEBUG nova.network.neutron [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Successfully created port: d830d64b-94fa-4bc8-a3e6-e45c4b0ae629 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 862.676157] env[62522]: DEBUG nova.network.neutron [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 862.750675] env[62522]: DEBUG nova.compute.manager [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 862.872183] env[62522]: DEBUG oslo_vmware.api [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Task: {'id': task-2415574, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142617} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.874985] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 862.875200] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 862.875381] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 862.875554] env[62522]: INFO nova.compute.manager [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Took 1.12 seconds to destroy the instance on the hypervisor. 
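The entries above trace one complete vSphere task cycle for instance 504396d8-077d-4563-91b5-a7a6259eea27: the driver invokes PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task, gets back a Task handle (e.g. task-2415574), and oslo_vmware's wait_for_task/_poll_task loop polls it, logging "progress is 0%" until it reports "completed successfully" with a duration. As a minimal illustration only (not the oslo_vmware implementation; get_task_info and poll_interval are assumed names), the poll-until-done pattern the log is recording looks roughly like this in Python:

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        # Poll a task until it finishes, mirroring the _poll_task entries above.
        # get_task_info is a hypothetical callable returning an object with
        # .state in {'queued', 'running', 'success', 'error'} and .progress.
        while True:
            info = get_task_info()
            if info.state == 'success':
                return info
            if info.state == 'error':
                raise RuntimeError('task failed: %s' % getattr(info, 'error', None))
            print('progress is %s%%' % (info.progress or 0))  # e.g. "progress is 0%"
            time.sleep(poll_interval)
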
[ 862.875795] env[62522]: DEBUG oslo.service.loopingcall [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 862.876171] env[62522]: DEBUG nova.compute.manager [-] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 862.876270] env[62522]: DEBUG nova.network.neutron [-] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 862.932647] env[62522]: DEBUG nova.network.neutron [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Updating instance_info_cache with network_info: [{"id": "32b31ef7-1045-4a88-9dba-5d5e5040efc2", "address": "fa:16:3e:44:8d:78", "network": {"id": "214c091d-98dd-489b-8152-8da8572345b9", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-231986862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62105741677343cd9177bf00786ae761", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e30245c5-78f5-48e6-b504-c6c21f5a9b45", "external-id": "nsx-vlan-transportzone-409", "segmentation_id": 409, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32b31ef7-10", "ovs_interfaceid": "32b31ef7-1045-4a88-9dba-5d5e5040efc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.261291] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28256375-bf52-490e-abc9-e5a2dec688c0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.269051] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-843e460d-6071-4488-af97-78069b072c88 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.300500] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d95708cd-7127-4ddd-93a4-5e601bf7779f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.304162] env[62522]: DEBUG nova.compute.manager [req-dde1f097-5373-45db-90f9-08a001c73503 req-28301565-2933-4592-af75-4c3996c4ab38 service nova] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Received event network-vif-deleted-119d9006-8624-413c-94e2-a9ed9cbba8cb {{(pid=62522) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 863.304387] env[62522]: INFO nova.compute.manager [req-dde1f097-5373-45db-90f9-08a001c73503 req-28301565-2933-4592-af75-4c3996c4ab38 service nova] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Neutron deleted interface 119d9006-8624-413c-94e2-a9ed9cbba8cb; detaching it from the instance and deleting it from the info cache [ 863.304560] env[62522]: DEBUG nova.network.neutron [req-dde1f097-5373-45db-90f9-08a001c73503 req-28301565-2933-4592-af75-4c3996c4ab38 service nova] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.312177] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc2413c-2565-45c7-b675-4930bd2b8147 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.326223] env[62522]: DEBUG nova.compute.provider_tree [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 863.437315] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Releasing lock "refresh_cache-a10c4dee-4490-445a-bea2-9f8ef5425d15" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.437902] env[62522]: DEBUG nova.compute.manager [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Instance network_info: |[{"id": "32b31ef7-1045-4a88-9dba-5d5e5040efc2", "address": "fa:16:3e:44:8d:78", "network": {"id": "214c091d-98dd-489b-8152-8da8572345b9", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-231986862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62105741677343cd9177bf00786ae761", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e30245c5-78f5-48e6-b504-c6c21f5a9b45", "external-id": "nsx-vlan-transportzone-409", "segmentation_id": 409, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32b31ef7-10", "ovs_interfaceid": "32b31ef7-1045-4a88-9dba-5d5e5040efc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 863.438191] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: 
a10c4dee-4490-445a-bea2-9f8ef5425d15] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:8d:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e30245c5-78f5-48e6-b504-c6c21f5a9b45', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '32b31ef7-1045-4a88-9dba-5d5e5040efc2', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 863.446451] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Creating folder: Project (62105741677343cd9177bf00786ae761). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 863.447090] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9df478b8-057a-4109-aa63-433fee0985b8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.458206] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Created folder: Project (62105741677343cd9177bf00786ae761) in parent group-v489562. [ 863.458374] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Creating folder: Instances. Parent ref: group-v489717. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 863.458613] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e470fb86-3ffb-44d6-95ef-e9acfbde9bde {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.468073] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Created folder: Instances in parent group-v489717. [ 863.468311] env[62522]: DEBUG oslo.service.loopingcall [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 863.468500] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 863.468694] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ee2fa9c1-63c7-491a-b078-d0a1d521ef53 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.487485] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 863.487485] env[62522]: value = "task-2415577" [ 863.487485] env[62522]: _type = "Task" [ 863.487485] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.495338] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415577, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.560390] env[62522]: DEBUG nova.compute.manager [req-7829404e-77b5-4aea-bd74-dc279b7c3e4b req-90c1e6bf-3a2b-4e2d-83e1-896002ba0064 service nova] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Received event network-changed-32b31ef7-1045-4a88-9dba-5d5e5040efc2 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 863.560587] env[62522]: DEBUG nova.compute.manager [req-7829404e-77b5-4aea-bd74-dc279b7c3e4b req-90c1e6bf-3a2b-4e2d-83e1-896002ba0064 service nova] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Refreshing instance network info cache due to event network-changed-32b31ef7-1045-4a88-9dba-5d5e5040efc2. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 863.560834] env[62522]: DEBUG oslo_concurrency.lockutils [req-7829404e-77b5-4aea-bd74-dc279b7c3e4b req-90c1e6bf-3a2b-4e2d-83e1-896002ba0064 service nova] Acquiring lock "refresh_cache-a10c4dee-4490-445a-bea2-9f8ef5425d15" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.561031] env[62522]: DEBUG oslo_concurrency.lockutils [req-7829404e-77b5-4aea-bd74-dc279b7c3e4b req-90c1e6bf-3a2b-4e2d-83e1-896002ba0064 service nova] Acquired lock "refresh_cache-a10c4dee-4490-445a-bea2-9f8ef5425d15" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.561205] env[62522]: DEBUG nova.network.neutron [req-7829404e-77b5-4aea-bd74-dc279b7c3e4b req-90c1e6bf-3a2b-4e2d-83e1-896002ba0064 service nova] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Refreshing network info cache for port 32b31ef7-1045-4a88-9dba-5d5e5040efc2 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 863.691471] env[62522]: DEBUG nova.network.neutron [-] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.761233] env[62522]: DEBUG nova.compute.manager [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 863.786860] env[62522]: DEBUG nova.virt.hardware [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:21:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1bf21d87-4ee8-4637-a3ba-85267d79b549',id=39,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1714837760',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 863.787113] env[62522]: DEBUG nova.virt.hardware [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 863.787271] env[62522]: DEBUG nova.virt.hardware [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 863.787450] env[62522]: DEBUG nova.virt.hardware [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 863.787594] env[62522]: DEBUG nova.virt.hardware [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 863.787740] env[62522]: DEBUG nova.virt.hardware [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 863.787946] env[62522]: DEBUG nova.virt.hardware [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 863.788118] env[62522]: DEBUG nova.virt.hardware [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 863.788288] env[62522]: DEBUG 
nova.virt.hardware [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 863.788448] env[62522]: DEBUG nova.virt.hardware [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 863.788619] env[62522]: DEBUG nova.virt.hardware [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 863.789497] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf059e4-c9b5-4e1a-96e3-2fa87780da3b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.797310] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03d66b0f-6cc9-4c7f-9462-f98e7e6db989 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.810544] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cb48d398-512f-413f-b59e-9ae202442a9b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.818386] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac41740-3af8-474a-a7b4-71dfbfe0888d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.830023] env[62522]: DEBUG nova.scheduler.client.report [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 863.851626] env[62522]: DEBUG nova.compute.manager [req-dde1f097-5373-45db-90f9-08a001c73503 req-28301565-2933-4592-af75-4c3996c4ab38 service nova] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Detach interface failed, port_id=119d9006-8624-413c-94e2-a9ed9cbba8cb, reason: Instance 504396d8-077d-4563-91b5-a7a6259eea27 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 863.997739] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415577, 'name': CreateVM_Task, 'duration_secs': 0.307962} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.997931] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 863.998589] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.998754] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.999086] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 863.999340] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7873d334-5a33-42ac-a4ef-3f75a8bb43ae {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.003773] env[62522]: DEBUG oslo_vmware.api [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Waiting for the task: (returnval){ [ 864.003773] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525b94ed-d26f-1159-5f2f-b0e9038a885e" [ 864.003773] env[62522]: _type = "Task" [ 864.003773] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.011638] env[62522]: DEBUG oslo_vmware.api [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525b94ed-d26f-1159-5f2f-b0e9038a885e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.194400] env[62522]: INFO nova.compute.manager [-] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Took 1.32 seconds to deallocate network for instance. 
[ 864.235680] env[62522]: DEBUG nova.network.neutron [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Successfully updated port: d830d64b-94fa-4bc8-a3e6-e45c4b0ae629 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 864.329606] env[62522]: DEBUG nova.network.neutron [req-7829404e-77b5-4aea-bd74-dc279b7c3e4b req-90c1e6bf-3a2b-4e2d-83e1-896002ba0064 service nova] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Updated VIF entry in instance network info cache for port 32b31ef7-1045-4a88-9dba-5d5e5040efc2. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 864.330027] env[62522]: DEBUG nova.network.neutron [req-7829404e-77b5-4aea-bd74-dc279b7c3e4b req-90c1e6bf-3a2b-4e2d-83e1-896002ba0064 service nova] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Updating instance_info_cache with network_info: [{"id": "32b31ef7-1045-4a88-9dba-5d5e5040efc2", "address": "fa:16:3e:44:8d:78", "network": {"id": "214c091d-98dd-489b-8152-8da8572345b9", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-231986862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62105741677343cd9177bf00786ae761", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e30245c5-78f5-48e6-b504-c6c21f5a9b45", "external-id": "nsx-vlan-transportzone-409", "segmentation_id": 409, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32b31ef7-10", "ovs_interfaceid": "32b31ef7-1045-4a88-9dba-5d5e5040efc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.334530] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.595s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.334999] env[62522]: DEBUG nova.compute.manager [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 864.337742] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.631s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.340621] env[62522]: INFO nova.compute.claims [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 864.515611] env[62522]: DEBUG oslo_vmware.api [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525b94ed-d26f-1159-5f2f-b0e9038a885e, 'name': SearchDatastore_Task, 'duration_secs': 0.009378} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.515611] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.515765] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 864.515966] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.516130] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.516315] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 864.516577] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-468b616d-e00e-4a34-83b7-b10dcec79772 {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.525905] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 864.525905] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 864.526056] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d794d6d-90f8-4796-aaf3-465e7eadf76b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.531927] env[62522]: DEBUG oslo_vmware.api [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Waiting for the task: (returnval){ [ 864.531927] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bc0f89-767a-ca6c-a593-2ae3a84d2f76" [ 864.531927] env[62522]: _type = "Task" [ 864.531927] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.539474] env[62522]: DEBUG oslo_vmware.api [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bc0f89-767a-ca6c-a593-2ae3a84d2f76, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.701875] env[62522]: DEBUG oslo_concurrency.lockutils [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.738117] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "refresh_cache-74e52638-d284-4bd1-8cff-c7aca9426f75" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.738117] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquired lock "refresh_cache-74e52638-d284-4bd1-8cff-c7aca9426f75" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.738117] env[62522]: DEBUG nova.network.neutron [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 864.832715] env[62522]: DEBUG oslo_concurrency.lockutils [req-7829404e-77b5-4aea-bd74-dc279b7c3e4b req-90c1e6bf-3a2b-4e2d-83e1-896002ba0064 service nova] Releasing lock "refresh_cache-a10c4dee-4490-445a-bea2-9f8ef5425d15" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.848911] env[62522]: DEBUG nova.compute.utils [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 864.851042] env[62522]: DEBUG nova.compute.manager [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 864.851042] env[62522]: DEBUG nova.network.neutron [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 864.873293] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 864.873646] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 864.929994] env[62522]: DEBUG nova.policy [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc1c3efee2cd4153a413daebd5ceefed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cd5f87d0072e4d63acd3d201754f72f6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 865.043976] env[62522]: DEBUG oslo_vmware.api [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bc0f89-767a-ca6c-a593-2ae3a84d2f76, 'name': SearchDatastore_Task, 'duration_secs': 0.008257} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.044767] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3892804-e61f-4701-a09a-43c6fe32e77c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.050042] env[62522]: DEBUG oslo_vmware.api [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Waiting for the task: (returnval){ [ 865.050042] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52facdd0-d49a-cd8f-4755-cdec4ba112dd" [ 865.050042] env[62522]: _type = "Task" [ 865.050042] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.057551] env[62522]: DEBUG oslo_vmware.api [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52facdd0-d49a-cd8f-4755-cdec4ba112dd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.276080] env[62522]: DEBUG nova.network.neutron [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 865.317042] env[62522]: DEBUG nova.network.neutron [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Successfully created port: 9d7170cd-1d24-4b21-84d5-6f67ba579199 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 865.357617] env[62522]: DEBUG nova.compute.manager [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 865.388433] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 865.388433] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Starting heal instance info cache {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 865.447044] env[62522]: DEBUG nova.network.neutron [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Updating instance_info_cache with network_info: [{"id": "d830d64b-94fa-4bc8-a3e6-e45c4b0ae629", "address": "fa:16:3e:d4:80:4e", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.185", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd830d64b-94", "ovs_interfaceid": "d830d64b-94fa-4bc8-a3e6-e45c4b0ae629", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.570165] env[62522]: DEBUG oslo_vmware.api [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Task: {'id': 
session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52facdd0-d49a-cd8f-4755-cdec4ba112dd, 'name': SearchDatastore_Task, 'duration_secs': 0.009094} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.572977] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.573184] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] a10c4dee-4490-445a-bea2-9f8ef5425d15/a10c4dee-4490-445a-bea2-9f8ef5425d15.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 865.573566] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-83364e07-d224-4e2f-bfbe-bfa4bde8c074 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.580642] env[62522]: DEBUG oslo_vmware.api [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Waiting for the task: (returnval){ [ 865.580642] env[62522]: value = "task-2415578" [ 865.580642] env[62522]: _type = "Task" [ 865.580642] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.590322] env[62522]: DEBUG nova.compute.manager [req-bbed1cca-673b-4603-bcc4-812461043ab4 req-8c3bd1fd-e8d9-4c0d-b0f9-b361a3702353 service nova] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Received event network-vif-plugged-d830d64b-94fa-4bc8-a3e6-e45c4b0ae629 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 865.590322] env[62522]: DEBUG oslo_concurrency.lockutils [req-bbed1cca-673b-4603-bcc4-812461043ab4 req-8c3bd1fd-e8d9-4c0d-b0f9-b361a3702353 service nova] Acquiring lock "74e52638-d284-4bd1-8cff-c7aca9426f75-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.590490] env[62522]: DEBUG oslo_concurrency.lockutils [req-bbed1cca-673b-4603-bcc4-812461043ab4 req-8c3bd1fd-e8d9-4c0d-b0f9-b361a3702353 service nova] Lock "74e52638-d284-4bd1-8cff-c7aca9426f75-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.590679] env[62522]: DEBUG oslo_concurrency.lockutils [req-bbed1cca-673b-4603-bcc4-812461043ab4 req-8c3bd1fd-e8d9-4c0d-b0f9-b361a3702353 service nova] Lock "74e52638-d284-4bd1-8cff-c7aca9426f75-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.590907] env[62522]: DEBUG nova.compute.manager [req-bbed1cca-673b-4603-bcc4-812461043ab4 req-8c3bd1fd-e8d9-4c0d-b0f9-b361a3702353 service nova] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] No waiting events found dispatching network-vif-plugged-d830d64b-94fa-4bc8-a3e6-e45c4b0ae629 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 865.591143] env[62522]: WARNING nova.compute.manager [req-bbed1cca-673b-4603-bcc4-812461043ab4 req-8c3bd1fd-e8d9-4c0d-b0f9-b361a3702353 service nova] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Received unexpected event network-vif-plugged-d830d64b-94fa-4bc8-a3e6-e45c4b0ae629 for instance with vm_state building and task_state spawning. [ 865.591368] env[62522]: DEBUG nova.compute.manager [req-bbed1cca-673b-4603-bcc4-812461043ab4 req-8c3bd1fd-e8d9-4c0d-b0f9-b361a3702353 service nova] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Received event network-changed-d830d64b-94fa-4bc8-a3e6-e45c4b0ae629 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 865.591650] env[62522]: DEBUG nova.compute.manager [req-bbed1cca-673b-4603-bcc4-812461043ab4 req-8c3bd1fd-e8d9-4c0d-b0f9-b361a3702353 service nova] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Refreshing instance network info cache due to event network-changed-d830d64b-94fa-4bc8-a3e6-e45c4b0ae629. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 865.591924] env[62522]: DEBUG oslo_concurrency.lockutils [req-bbed1cca-673b-4603-bcc4-812461043ab4 req-8c3bd1fd-e8d9-4c0d-b0f9-b361a3702353 service nova] Acquiring lock "refresh_cache-74e52638-d284-4bd1-8cff-c7aca9426f75" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.596630] env[62522]: DEBUG oslo_vmware.api [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Task: {'id': task-2415578, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.869037] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1999f70f-1a57-41f0-b155-bcbcaaf8e378 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.876521] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e33ee84-aef9-437e-a910-4427879e5cae {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.910215] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a1d103c-bc70-4bc7-ad65-32e2299ea9cb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.919033] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-601304af-437b-4418-8d0b-2e750c95f83f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.933785] env[62522]: DEBUG nova.compute.provider_tree [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 865.949482] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Releasing lock "refresh_cache-74e52638-d284-4bd1-8cff-c7aca9426f75" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.950020] env[62522]: DEBUG nova.compute.manager [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Instance network_info: |[{"id": "d830d64b-94fa-4bc8-a3e6-e45c4b0ae629", "address": "fa:16:3e:d4:80:4e", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.185", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, 
"type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd830d64b-94", "ovs_interfaceid": "d830d64b-94fa-4bc8-a3e6-e45c4b0ae629", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 865.950345] env[62522]: DEBUG oslo_concurrency.lockutils [req-bbed1cca-673b-4603-bcc4-812461043ab4 req-8c3bd1fd-e8d9-4c0d-b0f9-b361a3702353 service nova] Acquired lock "refresh_cache-74e52638-d284-4bd1-8cff-c7aca9426f75" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.950595] env[62522]: DEBUG nova.network.neutron [req-bbed1cca-673b-4603-bcc4-812461043ab4 req-8c3bd1fd-e8d9-4c0d-b0f9-b361a3702353 service nova] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Refreshing network info cache for port d830d64b-94fa-4bc8-a3e6-e45c4b0ae629 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 865.952345] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:80:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f6d1427-d86b-4371-9172-50e4bb0eb1cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd830d64b-94fa-4bc8-a3e6-e45c4b0ae629', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 865.961083] env[62522]: DEBUG oslo.service.loopingcall [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 865.962235] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 865.962505] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-92f2a3ad-4ccf-423b-8ed8-c71924e1a953 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.983575] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 865.983575] env[62522]: value = "task-2415579" [ 865.983575] env[62522]: _type = "Task" [ 865.983575] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.992461] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415579, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.090596] env[62522]: DEBUG oslo_vmware.api [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Task: {'id': task-2415578, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.431327} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.090871] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] a10c4dee-4490-445a-bea2-9f8ef5425d15/a10c4dee-4490-445a-bea2-9f8ef5425d15.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 866.091102] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 866.091911] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ce84ef1c-65db-49f8-81c0-1e6f65d73522 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.097712] env[62522]: DEBUG oslo_vmware.api [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Waiting for the task: (returnval){ [ 866.097712] env[62522]: value = "task-2415580" [ 866.097712] env[62522]: _type = "Task" [ 866.097712] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.107222] env[62522]: DEBUG oslo_vmware.api [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Task: {'id': task-2415580, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.373670] env[62522]: DEBUG nova.compute.manager [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 866.398508] env[62522]: DEBUG nova.virt.hardware [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 866.398686] env[62522]: DEBUG nova.virt.hardware [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 866.398874] env[62522]: DEBUG nova.virt.hardware [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 866.399091] env[62522]: DEBUG nova.virt.hardware [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 866.399336] env[62522]: DEBUG nova.virt.hardware [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 866.399406] env[62522]: DEBUG nova.virt.hardware [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 866.399616] env[62522]: DEBUG nova.virt.hardware [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 866.399838] env[62522]: DEBUG nova.virt.hardware [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 866.399982] 
env[62522]: DEBUG nova.virt.hardware [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 866.400213] env[62522]: DEBUG nova.virt.hardware [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 866.400391] env[62522]: DEBUG nova.virt.hardware [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 866.401309] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3248ef53-6bee-47ae-a8b5-75a62b174cce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.414353] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae20e341-9c32-41e2-a6e5-947cb2bed9cc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.436765] env[62522]: DEBUG nova.scheduler.client.report [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 866.445191] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "refresh_cache-95e4fe36-6830-4fc4-bb53-1e5643c2f95b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.445331] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquired lock "refresh_cache-95e4fe36-6830-4fc4-bb53-1e5643c2f95b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.445472] env[62522]: DEBUG nova.network.neutron [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Forcefully refreshing network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 866.493425] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415579, 'name': CreateVM_Task, 'duration_secs': 0.309322} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.493598] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 866.494320] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.494496] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.494808] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 866.497095] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42a25c37-1cd5-4fca-ae06-b16d09b0680d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.502015] env[62522]: DEBUG oslo_vmware.api [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 866.502015] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529425be-3fb0-bac7-7857-969347d6c8f2" [ 866.502015] env[62522]: _type = "Task" [ 866.502015] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.509424] env[62522]: DEBUG oslo_vmware.api [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529425be-3fb0-bac7-7857-969347d6c8f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.609103] env[62522]: DEBUG oslo_vmware.api [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Task: {'id': task-2415580, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.057463} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.609332] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 866.610144] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8d9ade8-8020-4d92-9723-f7d74b3ad986 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.631329] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] a10c4dee-4490-445a-bea2-9f8ef5425d15/a10c4dee-4490-445a-bea2-9f8ef5425d15.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 866.631943] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed56a094-9db9-4e39-ba27-37530e04067a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.653186] env[62522]: DEBUG oslo_vmware.api [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Waiting for the task: (returnval){ [ 866.653186] env[62522]: value = "task-2415581" [ 866.653186] env[62522]: _type = "Task" [ 866.653186] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.660894] env[62522]: DEBUG oslo_vmware.api [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Task: {'id': task-2415581, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.682323] env[62522]: DEBUG nova.network.neutron [req-bbed1cca-673b-4603-bcc4-812461043ab4 req-8c3bd1fd-e8d9-4c0d-b0f9-b361a3702353 service nova] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Updated VIF entry in instance network info cache for port d830d64b-94fa-4bc8-a3e6-e45c4b0ae629. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 866.682752] env[62522]: DEBUG nova.network.neutron [req-bbed1cca-673b-4603-bcc4-812461043ab4 req-8c3bd1fd-e8d9-4c0d-b0f9-b361a3702353 service nova] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Updating instance_info_cache with network_info: [{"id": "d830d64b-94fa-4bc8-a3e6-e45c4b0ae629", "address": "fa:16:3e:d4:80:4e", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.185", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd830d64b-94", "ovs_interfaceid": "d830d64b-94fa-4bc8-a3e6-e45c4b0ae629", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.943469] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.606s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.943994] env[62522]: DEBUG nova.compute.manager [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 866.947439] env[62522]: DEBUG oslo_concurrency.lockutils [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.728s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.948081] env[62522]: DEBUG nova.objects.instance [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lazy-loading 'resources' on Instance uuid 19d3d54c-5ba1-420f-b012-a08add8546c9 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 866.971832] env[62522]: DEBUG nova.network.neutron [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 867.010920] env[62522]: DEBUG nova.compute.manager [req-6a5000fa-65da-4c65-ae9a-28912b4dd380 req-f9e8e1e3-3452-4b9d-8da4-daccfdd9f080 service nova] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Received event network-vif-plugged-9d7170cd-1d24-4b21-84d5-6f67ba579199 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 867.010920] env[62522]: DEBUG oslo_concurrency.lockutils [req-6a5000fa-65da-4c65-ae9a-28912b4dd380 req-f9e8e1e3-3452-4b9d-8da4-daccfdd9f080 service nova] Acquiring lock "6ef27aee-719c-4089-825d-fc117e867bde-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.010920] env[62522]: DEBUG oslo_concurrency.lockutils [req-6a5000fa-65da-4c65-ae9a-28912b4dd380 req-f9e8e1e3-3452-4b9d-8da4-daccfdd9f080 service nova] Lock "6ef27aee-719c-4089-825d-fc117e867bde-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.010920] env[62522]: DEBUG oslo_concurrency.lockutils [req-6a5000fa-65da-4c65-ae9a-28912b4dd380 req-f9e8e1e3-3452-4b9d-8da4-daccfdd9f080 service nova] Lock "6ef27aee-719c-4089-825d-fc117e867bde-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.011155] env[62522]: DEBUG nova.compute.manager [req-6a5000fa-65da-4c65-ae9a-28912b4dd380 req-f9e8e1e3-3452-4b9d-8da4-daccfdd9f080 service nova] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] No waiting events found dispatching network-vif-plugged-9d7170cd-1d24-4b21-84d5-6f67ba579199 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 867.011490] env[62522]: WARNING nova.compute.manager [req-6a5000fa-65da-4c65-ae9a-28912b4dd380 req-f9e8e1e3-3452-4b9d-8da4-daccfdd9f080 service nova] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Received unexpected event network-vif-plugged-9d7170cd-1d24-4b21-84d5-6f67ba579199 for instance with vm_state building and task_state spawning. [ 867.017195] env[62522]: DEBUG oslo_vmware.api [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529425be-3fb0-bac7-7857-969347d6c8f2, 'name': SearchDatastore_Task, 'duration_secs': 0.009344} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.017659] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.017960] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 867.018202] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.018417] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.018589] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 867.018875] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ebd9b631-466f-4f16-9a41-eb933b33b31e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.028380] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 867.028555] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 867.029279] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06e5f7e9-768a-4235-99e3-e63df2b039f1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.037757] env[62522]: DEBUG oslo_vmware.api [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 867.037757] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b95658-98fb-174e-51ac-2899f1d55189" [ 867.037757] env[62522]: _type = "Task" [ 867.037757] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.046235] env[62522]: DEBUG oslo_vmware.api [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b95658-98fb-174e-51ac-2899f1d55189, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.163212] env[62522]: DEBUG oslo_vmware.api [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Task: {'id': task-2415581, 'name': ReconfigVM_Task, 'duration_secs': 0.270189} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.163580] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Reconfigured VM instance instance-00000036 to attach disk [datastore2] a10c4dee-4490-445a-bea2-9f8ef5425d15/a10c4dee-4490-445a-bea2-9f8ef5425d15.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 867.164273] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c7dd07f0-81cc-4cc2-9533-850ba1a8df46 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.170930] env[62522]: DEBUG oslo_vmware.api [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Waiting for the task: (returnval){ [ 867.170930] env[62522]: value = "task-2415582" [ 867.170930] env[62522]: _type = "Task" [ 867.170930] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.179096] env[62522]: DEBUG oslo_vmware.api [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Task: {'id': task-2415582, 'name': Rename_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.185683] env[62522]: DEBUG oslo_concurrency.lockutils [req-bbed1cca-673b-4603-bcc4-812461043ab4 req-8c3bd1fd-e8d9-4c0d-b0f9-b361a3702353 service nova] Releasing lock "refresh_cache-74e52638-d284-4bd1-8cff-c7aca9426f75" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.357970] env[62522]: DEBUG nova.network.neutron [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Successfully updated port: 9d7170cd-1d24-4b21-84d5-6f67ba579199 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 867.449309] env[62522]: DEBUG nova.compute.utils [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 867.450749] env[62522]: DEBUG nova.compute.manager [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 867.450992] env[62522]: DEBUG nova.network.neutron [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 867.552170] env[62522]: DEBUG nova.policy [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc1c3efee2cd4153a413daebd5ceefed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cd5f87d0072e4d63acd3d201754f72f6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 867.554472] env[62522]: DEBUG oslo_vmware.api [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b95658-98fb-174e-51ac-2899f1d55189, 'name': SearchDatastore_Task, 'duration_secs': 0.009575} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.554879] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bc1d5e1-2838-4422-afd2-ad5212480896 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.560294] env[62522]: DEBUG oslo_vmware.api [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 867.560294] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52281e9c-5870-fc15-eb0f-e9e4a6e4798a" [ 867.560294] env[62522]: _type = "Task" [ 867.560294] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.569514] env[62522]: DEBUG oslo_vmware.api [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52281e9c-5870-fc15-eb0f-e9e4a6e4798a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.579788] env[62522]: DEBUG nova.network.neutron [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.681309] env[62522]: DEBUG oslo_vmware.api [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Task: {'id': task-2415582, 'name': Rename_Task, 'duration_secs': 0.135483} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.681591] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 867.681842] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea560979-88bf-40a9-83a0-8d23f6da7eb7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.688226] env[62522]: DEBUG oslo_vmware.api [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Waiting for the task: (returnval){ [ 867.688226] env[62522]: value = "task-2415583" [ 867.688226] env[62522]: _type = "Task" [ 867.688226] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.696052] env[62522]: DEBUG oslo_vmware.api [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Task: {'id': task-2415583, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.861702] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Acquiring lock "refresh_cache-6ef27aee-719c-4089-825d-fc117e867bde" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.863043] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Acquired lock "refresh_cache-6ef27aee-719c-4089-825d-fc117e867bde" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.863043] env[62522]: DEBUG nova.network.neutron [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 867.956574] env[62522]: DEBUG nova.compute.manager [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 867.982862] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b769ce-b33b-4b54-a4d7-0d835355c7f6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.990484] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-002ac704-dd55-4b8a-815b-173776aa18f8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.994668] env[62522]: DEBUG nova.network.neutron [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Successfully created port: b635a257-729f-4428-9bb4-d56e3bb92bf2 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 868.024507] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d106c27c-47af-458f-910a-db95fe2d4c58 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.032708] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b8c5b8-33ce-4010-bfe2-93ca10e1329b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.046644] env[62522]: DEBUG nova.compute.provider_tree [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 868.071044] env[62522]: DEBUG oslo_vmware.api [None 
req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52281e9c-5870-fc15-eb0f-e9e4a6e4798a, 'name': SearchDatastore_Task, 'duration_secs': 0.009016} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.071316] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.071572] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 74e52638-d284-4bd1-8cff-c7aca9426f75/74e52638-d284-4bd1-8cff-c7aca9426f75.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 868.074024] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4ae184a2-d7b3-4841-83f8-faf805d2f610 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.078795] env[62522]: DEBUG oslo_vmware.api [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 868.078795] env[62522]: value = "task-2415584" [ 868.078795] env[62522]: _type = "Task" [ 868.078795] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.087055] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Releasing lock "refresh_cache-95e4fe36-6830-4fc4-bb53-1e5643c2f95b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.087252] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Updated the network info_cache for instance {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 868.087472] env[62522]: DEBUG oslo_vmware.api [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415584, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.087689] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.087946] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.088163] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.088438] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.088520] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.088670] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.088798] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62522) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 868.088943] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.201361] env[62522]: DEBUG oslo_vmware.api [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Task: {'id': task-2415583, 'name': PowerOnVM_Task, 'duration_secs': 0.464607} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.201721] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 868.201973] env[62522]: INFO nova.compute.manager [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Took 7.00 seconds to spawn the instance on the hypervisor. 
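[editor's note] The "Waiting for the task … progress is N% … completed successfully" cadence that recurs through these entries (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) is oslo.vmware's task-polling loop. As a rough illustration only, and not Nova's actual code path, the sketch below drives the same pattern through the public oslo.vmware API; the vCenter host, credentials and the "vm-123" managed object ID are placeholders.

    # Illustrative sketch, assuming oslo.vmware is installed and a reachable
    # vCenter; all connection values and the VM moref below are placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        host='vc.example.test',       # placeholder vCenter endpoint
        server_username='user',       # placeholder
        server_password='secret',     # placeholder
        api_retry_count=3,
        task_poll_interval=0.5,       # seconds between the "progress is N%" polls
    )

    # Build a managed object reference for a VM (placeholder ID) and start a task.
    vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # wait_for_task() polls the task until it reaches 'success' (or raises on
    # error/cancel); each poll is what produces the recurring
    # "Task: {... 'name': 'PowerOnVM_Task'} progress is N%" debug lines above.
    result = session.wait_for_task(task)

The `task_poll_interval` setting controls how often those progress lines are emitted; the `duration_secs` value recorded when a task completes is measured across that same polling loop.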
[ 868.202226] env[62522]: DEBUG nova.compute.manager [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 868.203075] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-345ad6b2-9feb-403a-9b4f-1adbecb671d5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.406552] env[62522]: DEBUG nova.network.neutron [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 868.550030] env[62522]: DEBUG nova.scheduler.client.report [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 868.584752] env[62522]: DEBUG nova.network.neutron [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Updating instance_info_cache with network_info: [{"id": "9d7170cd-1d24-4b21-84d5-6f67ba579199", "address": "fa:16:3e:e3:99:c7", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.92", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d7170cd-1d", "ovs_interfaceid": "9d7170cd-1d24-4b21-84d5-6f67ba579199", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.592688] env[62522]: DEBUG oslo_vmware.api [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415584, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.595237] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.723086] env[62522]: INFO nova.compute.manager [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Took 44.97 seconds to build instance. [ 868.967560] env[62522]: DEBUG nova.compute.manager [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 868.994781] env[62522]: DEBUG nova.virt.hardware [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 868.995042] env[62522]: DEBUG nova.virt.hardware [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 868.995357] env[62522]: DEBUG nova.virt.hardware [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 868.995578] env[62522]: DEBUG nova.virt.hardware [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 868.995728] env[62522]: DEBUG nova.virt.hardware [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 868.995875] env[62522]: DEBUG nova.virt.hardware [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 
tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 868.996093] env[62522]: DEBUG nova.virt.hardware [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 868.996254] env[62522]: DEBUG nova.virt.hardware [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 868.996419] env[62522]: DEBUG nova.virt.hardware [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 868.996577] env[62522]: DEBUG nova.virt.hardware [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 868.996744] env[62522]: DEBUG nova.virt.hardware [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 868.997619] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cd68baa-9af2-42dd-8e14-54cef0088a5e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.005942] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a959b4ab-5b4c-4b0e-a881-835b2087158f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.034863] env[62522]: DEBUG nova.compute.manager [req-1420244d-ee94-4857-9a5d-877dcfaca051 req-c2d13185-0dee-469a-b0b0-63e07fe051b1 service nova] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Received event network-changed-9d7170cd-1d24-4b21-84d5-6f67ba579199 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 869.035078] env[62522]: DEBUG nova.compute.manager [req-1420244d-ee94-4857-9a5d-877dcfaca051 req-c2d13185-0dee-469a-b0b0-63e07fe051b1 service nova] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Refreshing instance network info cache due to event network-changed-9d7170cd-1d24-4b21-84d5-6f67ba579199. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 869.035270] env[62522]: DEBUG oslo_concurrency.lockutils [req-1420244d-ee94-4857-9a5d-877dcfaca051 req-c2d13185-0dee-469a-b0b0-63e07fe051b1 service nova] Acquiring lock "refresh_cache-6ef27aee-719c-4089-825d-fc117e867bde" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.055158] env[62522]: DEBUG oslo_concurrency.lockutils [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.108s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.057930] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.516s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.057930] env[62522]: DEBUG nova.objects.instance [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Lazy-loading 'resources' on Instance uuid a185273e-cdaf-4967-832b-f75014b7b3f4 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 869.080675] env[62522]: INFO nova.scheduler.client.report [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Deleted allocations for instance 19d3d54c-5ba1-420f-b012-a08add8546c9 [ 869.087404] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Releasing lock "refresh_cache-6ef27aee-719c-4089-825d-fc117e867bde" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.087719] env[62522]: DEBUG nova.compute.manager [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Instance network_info: |[{"id": "9d7170cd-1d24-4b21-84d5-6f67ba579199", "address": "fa:16:3e:e3:99:c7", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.92", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d7170cd-1d", "ovs_interfaceid": "9d7170cd-1d24-4b21-84d5-6f67ba579199", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 869.088599] env[62522]: DEBUG oslo_concurrency.lockutils [req-1420244d-ee94-4857-9a5d-877dcfaca051 req-c2d13185-0dee-469a-b0b0-63e07fe051b1 service nova] Acquired lock "refresh_cache-6ef27aee-719c-4089-825d-fc117e867bde" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.088788] env[62522]: DEBUG nova.network.neutron [req-1420244d-ee94-4857-9a5d-877dcfaca051 req-c2d13185-0dee-469a-b0b0-63e07fe051b1 service nova] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Refreshing network info cache for port 9d7170cd-1d24-4b21-84d5-6f67ba579199 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 869.089856] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:99:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f6d1427-d86b-4371-9172-50e4bb0eb1cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9d7170cd-1d24-4b21-84d5-6f67ba579199', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 869.097473] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Creating folder: Project (cd5f87d0072e4d63acd3d201754f72f6). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 869.103494] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-659f1f87-db18-411c-9a3f-95135a0b16e3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.105880] env[62522]: DEBUG oslo_vmware.api [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415584, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517805} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.106235] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 74e52638-d284-4bd1-8cff-c7aca9426f75/74e52638-d284-4bd1-8cff-c7aca9426f75.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 869.106469] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 869.108194] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0ea05d70-dcb3-4307-9015-2827133ea480 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.115203] env[62522]: DEBUG oslo_vmware.api [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 869.115203] env[62522]: value = "task-2415586" [ 869.115203] env[62522]: _type = "Task" [ 869.115203] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.119652] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Created folder: Project (cd5f87d0072e4d63acd3d201754f72f6) in parent group-v489562. [ 869.119904] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Creating folder: Instances. Parent ref: group-v489721. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 869.120488] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c1a9ddc-b237-4cfc-a8b5-b6af224e7ece {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.125089] env[62522]: DEBUG oslo_vmware.api [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415586, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.136344] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Created folder: Instances in parent group-v489721. [ 869.136629] env[62522]: DEBUG oslo.service.loopingcall [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 869.139986] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 869.140486] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a7cd983c-457c-4ad1-b7d5-86bf11cb1261 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.160122] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 869.160122] env[62522]: value = "task-2415588" [ 869.160122] env[62522]: _type = "Task" [ 869.160122] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.167764] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415588, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.224250] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cfd5c19-ef5c-4bea-a94a-90573e59d68e tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Lock "a10c4dee-4490-445a-bea2-9f8ef5425d15" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.403s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.334851] env[62522]: DEBUG nova.network.neutron [req-1420244d-ee94-4857-9a5d-877dcfaca051 req-c2d13185-0dee-469a-b0b0-63e07fe051b1 service nova] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Updated VIF entry in instance network info cache for port 9d7170cd-1d24-4b21-84d5-6f67ba579199. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 869.335462] env[62522]: DEBUG nova.network.neutron [req-1420244d-ee94-4857-9a5d-877dcfaca051 req-c2d13185-0dee-469a-b0b0-63e07fe051b1 service nova] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Updating instance_info_cache with network_info: [{"id": "9d7170cd-1d24-4b21-84d5-6f67ba579199", "address": "fa:16:3e:e3:99:c7", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.92", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d7170cd-1d", "ovs_interfaceid": "9d7170cd-1d24-4b21-84d5-6f67ba579199", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.593241] env[62522]: DEBUG oslo_concurrency.lockutils [None req-015c1d5a-da5f-4f38-b1ce-6666817308dd tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "19d3d54c-5ba1-420f-b012-a08add8546c9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.686s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.596112] env[62522]: DEBUG oslo_concurrency.lockutils [req-a28a8a4d-b7c6-40de-aa8c-69e64dd29aae req-de45deda-ccbf-42de-9a85-7b05a8f4c3e5 service nova] Acquired lock "19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.596112] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69c8aa6a-5228-4469-8203-4518b5b5798b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.604226] env[62522]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
[ 869.609039] env[62522]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=62522) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 869.609039] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eedf3cdb-8bf4-4993-9ec6-d692ec1747cb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.618437] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87bd7ea1-5677-4327-8a96-9072c96e56ce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.644596] env[62522]: DEBUG oslo_vmware.api [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415586, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.267436} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.647786] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 869.660743] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ae30d2-27a6-40b0-8399-314cffdb5884 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.665286] env[62522]: ERROR root [req-a28a8a4d-b7c6-40de-aa8c-69e64dd29aae req-de45deda-ccbf-42de-9a85-7b05a8f4c3e5 service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-489625' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 480, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in 
request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-489625' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-489625' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-489625'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-489625' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-489625' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-489625'}\n"]: nova.exception.InstanceNotFound: Instance 19d3d54c-5ba1-420f-b012-a08add8546c9 could not be found. [ 869.665747] env[62522]: DEBUG oslo_concurrency.lockutils [req-a28a8a4d-b7c6-40de-aa8c-69e64dd29aae req-de45deda-ccbf-42de-9a85-7b05a8f4c3e5 service nova] Releasing lock "19d3d54c-5ba1-420f-b012-a08add8546c9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.666130] env[62522]: DEBUG nova.compute.manager [req-a28a8a4d-b7c6-40de-aa8c-69e64dd29aae req-de45deda-ccbf-42de-9a85-7b05a8f4c3e5 service nova] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Detach interface failed, port_id=6f83c77d-45cc-446e-8a38-eb8a94e38f59, reason: Instance 19d3d54c-5ba1-420f-b012-a08add8546c9 could not be found. 
{{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 869.666475] env[62522]: DEBUG nova.compute.manager [req-a28a8a4d-b7c6-40de-aa8c-69e64dd29aae req-de45deda-ccbf-42de-9a85-7b05a8f4c3e5 service nova] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Received event network-vif-plugged-71039daa-ce8b-462d-b9f3-8e07f9ec2666 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 869.666787] env[62522]: DEBUG oslo_concurrency.lockutils [req-a28a8a4d-b7c6-40de-aa8c-69e64dd29aae req-de45deda-ccbf-42de-9a85-7b05a8f4c3e5 service nova] Acquiring lock "74e663b1-b552-4b71-aa74-308e908d79e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.667283] env[62522]: DEBUG oslo_concurrency.lockutils [req-a28a8a4d-b7c6-40de-aa8c-69e64dd29aae req-de45deda-ccbf-42de-9a85-7b05a8f4c3e5 service nova] Lock "74e663b1-b552-4b71-aa74-308e908d79e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.667948] env[62522]: DEBUG oslo_concurrency.lockutils [req-a28a8a4d-b7c6-40de-aa8c-69e64dd29aae req-de45deda-ccbf-42de-9a85-7b05a8f4c3e5 service nova] Lock "74e663b1-b552-4b71-aa74-308e908d79e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.667948] env[62522]: DEBUG nova.compute.manager [req-a28a8a4d-b7c6-40de-aa8c-69e64dd29aae req-de45deda-ccbf-42de-9a85-7b05a8f4c3e5 service nova] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] No waiting events found dispatching network-vif-plugged-71039daa-ce8b-462d-b9f3-8e07f9ec2666 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 869.668094] env[62522]: WARNING nova.compute.manager [req-a28a8a4d-b7c6-40de-aa8c-69e64dd29aae req-de45deda-ccbf-42de-9a85-7b05a8f4c3e5 service nova] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Received unexpected event network-vif-plugged-71039daa-ce8b-462d-b9f3-8e07f9ec2666 for instance with vm_state building and task_state spawning. [ 869.727283] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] 74e52638-d284-4bd1-8cff-c7aca9426f75/74e52638-d284-4bd1-8cff-c7aca9426f75.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 869.727283] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a96f600c-b958-4c38-aa85-b742913e32c3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.727283] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415588, 'name': CreateVM_Task, 'duration_secs': 0.445357} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.727283] env[62522]: DEBUG nova.network.neutron [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Successfully updated port: b635a257-729f-4428-9bb4-d56e3bb92bf2 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 869.727283] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 869.727283] env[62522]: DEBUG nova.compute.manager [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 869.734023] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.734023] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.734023] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 869.734023] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6141e423-9d9e-42a7-a2b2-18093949d893 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.736897] env[62522]: DEBUG oslo_vmware.api [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 869.736897] env[62522]: value = "task-2415589" [ 869.736897] env[62522]: _type = "Task" [ 869.736897] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.738381] env[62522]: DEBUG oslo_vmware.api [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 869.738381] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527f0c45-c957-e81e-7b6b-ca07c3e340dd" [ 869.738381] env[62522]: _type = "Task" [ 869.738381] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.766310] env[62522]: DEBUG oslo_vmware.api [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415589, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.766606] env[62522]: DEBUG oslo_vmware.api [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527f0c45-c957-e81e-7b6b-ca07c3e340dd, 'name': SearchDatastore_Task, 'duration_secs': 0.010332} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.770036] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.770302] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 869.770589] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.770787] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.771027] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 869.771491] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-60469078-f9d9-484c-b69b-b363b3ebbf3c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.781490] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 869.781677] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 869.782425] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-530c3747-ec0b-4566-85eb-9faa25b3358a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.790118] env[62522]: DEBUG oslo_vmware.api [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 869.790118] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d81fa4-f634-267f-f5d9-b2a46e962a72" [ 869.790118] env[62522]: _type = "Task" [ 869.790118] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.799052] env[62522]: DEBUG oslo_vmware.api [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d81fa4-f634-267f-f5d9-b2a46e962a72, 'name': SearchDatastore_Task, 'duration_secs': 0.008766} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.802176] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2489081-7fbf-4d0d-ada2-51f5daa680e0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.806996] env[62522]: DEBUG oslo_vmware.api [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 869.806996] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b7112f-84c8-d767-b33f-f644cbdeedb6" [ 869.806996] env[62522]: _type = "Task" [ 869.806996] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.816591] env[62522]: DEBUG oslo_vmware.api [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b7112f-84c8-d767-b33f-f644cbdeedb6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.840259] env[62522]: DEBUG oslo_concurrency.lockutils [req-1420244d-ee94-4857-9a5d-877dcfaca051 req-c2d13185-0dee-469a-b0b0-63e07fe051b1 service nova] Releasing lock "refresh_cache-6ef27aee-719c-4089-825d-fc117e867bde" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.945847] env[62522]: DEBUG nova.compute.manager [req-7135d8a7-4162-45ba-a840-ca09e855dbfc req-1772561e-290e-48e4-a94f-38f72a610ea3 service nova] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Received event network-changed-32b31ef7-1045-4a88-9dba-5d5e5040efc2 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 869.946083] env[62522]: DEBUG nova.compute.manager [req-7135d8a7-4162-45ba-a840-ca09e855dbfc req-1772561e-290e-48e4-a94f-38f72a610ea3 service nova] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Refreshing instance network info cache due to event network-changed-32b31ef7-1045-4a88-9dba-5d5e5040efc2. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 869.946296] env[62522]: DEBUG oslo_concurrency.lockutils [req-7135d8a7-4162-45ba-a840-ca09e855dbfc req-1772561e-290e-48e4-a94f-38f72a610ea3 service nova] Acquiring lock "refresh_cache-a10c4dee-4490-445a-bea2-9f8ef5425d15" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.946440] env[62522]: DEBUG oslo_concurrency.lockutils [req-7135d8a7-4162-45ba-a840-ca09e855dbfc req-1772561e-290e-48e4-a94f-38f72a610ea3 service nova] Acquired lock "refresh_cache-a10c4dee-4490-445a-bea2-9f8ef5425d15" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.946597] env[62522]: DEBUG nova.network.neutron [req-7135d8a7-4162-45ba-a840-ca09e855dbfc req-1772561e-290e-48e4-a94f-38f72a610ea3 service nova] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Refreshing network info cache for port 32b31ef7-1045-4a88-9dba-5d5e5040efc2 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 870.160750] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b24c5f-6f63-4f14-a8c2-b2ec4e8b1167 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.168473] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1475e33-5e8c-4d2b-ab7a-d177af80cc34 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.199679] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b625e685-8e15-42b4-9f5b-3ac9530294a3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.207481] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a54b84a-e8e0-4f53-86d4-effa0dc05fa0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.221845] env[62522]: DEBUG nova.compute.provider_tree [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Inventory has not changed in ProviderTree for provider: 
c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 870.231769] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Acquiring lock "refresh_cache-ff6637e9-2a67-4302-9769-24ec045538d4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.231769] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Acquired lock "refresh_cache-ff6637e9-2a67-4302-9769-24ec045538d4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.231769] env[62522]: DEBUG nova.network.neutron [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 870.247508] env[62522]: DEBUG oslo_vmware.api [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415589, 'name': ReconfigVM_Task, 'duration_secs': 0.288304} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.248413] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.248687] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Reconfigured VM instance instance-00000037 to attach disk [datastore2] 74e52638-d284-4bd1-8cff-c7aca9426f75/74e52638-d284-4bd1-8cff-c7aca9426f75.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 870.249299] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9bb95365-ae50-457a-89ff-c36e12c94277 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.255705] env[62522]: DEBUG oslo_vmware.api [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 870.255705] env[62522]: value = "task-2415590" [ 870.255705] env[62522]: _type = "Task" [ 870.255705] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.265192] env[62522]: DEBUG oslo_vmware.api [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415590, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.321343] env[62522]: DEBUG oslo_vmware.api [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b7112f-84c8-d767-b33f-f644cbdeedb6, 'name': SearchDatastore_Task, 'duration_secs': 0.008376} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.321446] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.321729] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 6ef27aee-719c-4089-825d-fc117e867bde/6ef27aee-719c-4089-825d-fc117e867bde.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 870.322012] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c50e4682-e6af-45a9-a334-4136ef56befc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.329218] env[62522]: DEBUG oslo_vmware.api [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 870.329218] env[62522]: value = "task-2415591" [ 870.329218] env[62522]: _type = "Task" [ 870.329218] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.337774] env[62522]: DEBUG oslo_vmware.api [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415591, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.730030] env[62522]: DEBUG nova.scheduler.client.report [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 870.766608] env[62522]: DEBUG oslo_vmware.api [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415590, 'name': Rename_Task, 'duration_secs': 0.176127} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.766888] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 870.767170] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c74d3473-78b5-4b5e-a2fc-424d31bd06e6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.775806] env[62522]: DEBUG oslo_vmware.api [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 870.775806] env[62522]: value = "task-2415592" [ 870.775806] env[62522]: _type = "Task" [ 870.775806] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.788645] env[62522]: DEBUG oslo_vmware.api [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415592, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.789597] env[62522]: DEBUG nova.network.neutron [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 870.839068] env[62522]: DEBUG oslo_vmware.api [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415591, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.851587] env[62522]: DEBUG nova.network.neutron [req-7135d8a7-4162-45ba-a840-ca09e855dbfc req-1772561e-290e-48e4-a94f-38f72a610ea3 service nova] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Updated VIF entry in instance network info cache for port 32b31ef7-1045-4a88-9dba-5d5e5040efc2. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 870.851980] env[62522]: DEBUG nova.network.neutron [req-7135d8a7-4162-45ba-a840-ca09e855dbfc req-1772561e-290e-48e4-a94f-38f72a610ea3 service nova] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Updating instance_info_cache with network_info: [{"id": "32b31ef7-1045-4a88-9dba-5d5e5040efc2", "address": "fa:16:3e:44:8d:78", "network": {"id": "214c091d-98dd-489b-8152-8da8572345b9", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-231986862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.203", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62105741677343cd9177bf00786ae761", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e30245c5-78f5-48e6-b504-c6c21f5a9b45", "external-id": "nsx-vlan-transportzone-409", "segmentation_id": 409, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32b31ef7-10", "ovs_interfaceid": "32b31ef7-1045-4a88-9dba-5d5e5040efc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.122142] env[62522]: DEBUG nova.compute.manager [req-a9984587-88df-4d8b-8169-4d8aa9cde12d req-f046a380-e8c8-4253-9f26-a7cbdc7ccf0e service nova] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Received event network-vif-plugged-b635a257-729f-4428-9bb4-d56e3bb92bf2 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 871.122291] env[62522]: DEBUG oslo_concurrency.lockutils [req-a9984587-88df-4d8b-8169-4d8aa9cde12d req-f046a380-e8c8-4253-9f26-a7cbdc7ccf0e service nova] Acquiring lock "ff6637e9-2a67-4302-9769-24ec045538d4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.122492] env[62522]: DEBUG oslo_concurrency.lockutils [req-a9984587-88df-4d8b-8169-4d8aa9cde12d req-f046a380-e8c8-4253-9f26-a7cbdc7ccf0e service nova] Lock "ff6637e9-2a67-4302-9769-24ec045538d4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.123672] env[62522]: DEBUG oslo_concurrency.lockutils [req-a9984587-88df-4d8b-8169-4d8aa9cde12d req-f046a380-e8c8-4253-9f26-a7cbdc7ccf0e service nova] Lock "ff6637e9-2a67-4302-9769-24ec045538d4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s 
{{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.123672] env[62522]: DEBUG nova.compute.manager [req-a9984587-88df-4d8b-8169-4d8aa9cde12d req-f046a380-e8c8-4253-9f26-a7cbdc7ccf0e service nova] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] No waiting events found dispatching network-vif-plugged-b635a257-729f-4428-9bb4-d56e3bb92bf2 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 871.123672] env[62522]: WARNING nova.compute.manager [req-a9984587-88df-4d8b-8169-4d8aa9cde12d req-f046a380-e8c8-4253-9f26-a7cbdc7ccf0e service nova] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Received unexpected event network-vif-plugged-b635a257-729f-4428-9bb4-d56e3bb92bf2 for instance with vm_state building and task_state spawning. [ 871.123672] env[62522]: DEBUG nova.compute.manager [req-a9984587-88df-4d8b-8169-4d8aa9cde12d req-f046a380-e8c8-4253-9f26-a7cbdc7ccf0e service nova] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Received event network-changed-b635a257-729f-4428-9bb4-d56e3bb92bf2 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 871.123672] env[62522]: DEBUG nova.compute.manager [req-a9984587-88df-4d8b-8169-4d8aa9cde12d req-f046a380-e8c8-4253-9f26-a7cbdc7ccf0e service nova] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Refreshing instance network info cache due to event network-changed-b635a257-729f-4428-9bb4-d56e3bb92bf2. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 871.123672] env[62522]: DEBUG oslo_concurrency.lockutils [req-a9984587-88df-4d8b-8169-4d8aa9cde12d req-f046a380-e8c8-4253-9f26-a7cbdc7ccf0e service nova] Acquiring lock "refresh_cache-ff6637e9-2a67-4302-9769-24ec045538d4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.168534] env[62522]: DEBUG nova.network.neutron [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Updating instance_info_cache with network_info: [{"id": "b635a257-729f-4428-9bb4-d56e3bb92bf2", "address": "fa:16:3e:45:67:f1", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.158", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb635a257-72", "ovs_interfaceid": "b635a257-729f-4428-9bb4-d56e3bb92bf2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.238465] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 
tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.181s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.241579] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.396s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.243217] env[62522]: INFO nova.compute.claims [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 871.263679] env[62522]: INFO nova.scheduler.client.report [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Deleted allocations for instance a185273e-cdaf-4967-832b-f75014b7b3f4 [ 871.287793] env[62522]: DEBUG oslo_vmware.api [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415592, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.342794] env[62522]: DEBUG oslo_vmware.api [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415591, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.541372} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.347355] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 6ef27aee-719c-4089-825d-fc117e867bde/6ef27aee-719c-4089-825d-fc117e867bde.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 871.347603] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 871.347917] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dd230332-0d58-42d2-a2a7-2f2ef537b4d9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.355781] env[62522]: DEBUG oslo_vmware.api [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 871.355781] env[62522]: value = "task-2415593" [ 871.355781] env[62522]: _type = "Task" [ 871.355781] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.356421] env[62522]: DEBUG oslo_concurrency.lockutils [req-7135d8a7-4162-45ba-a840-ca09e855dbfc req-1772561e-290e-48e4-a94f-38f72a610ea3 service nova] Releasing lock "refresh_cache-a10c4dee-4490-445a-bea2-9f8ef5425d15" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.366394] env[62522]: DEBUG oslo_vmware.api [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415593, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.671376] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Releasing lock "refresh_cache-ff6637e9-2a67-4302-9769-24ec045538d4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.671827] env[62522]: DEBUG nova.compute.manager [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Instance network_info: |[{"id": "b635a257-729f-4428-9bb4-d56e3bb92bf2", "address": "fa:16:3e:45:67:f1", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.158", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb635a257-72", "ovs_interfaceid": "b635a257-729f-4428-9bb4-d56e3bb92bf2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 871.672239] env[62522]: DEBUG oslo_concurrency.lockutils [req-a9984587-88df-4d8b-8169-4d8aa9cde12d req-f046a380-e8c8-4253-9f26-a7cbdc7ccf0e service nova] Acquired lock "refresh_cache-ff6637e9-2a67-4302-9769-24ec045538d4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.672421] env[62522]: DEBUG nova.network.neutron [req-a9984587-88df-4d8b-8169-4d8aa9cde12d req-f046a380-e8c8-4253-9f26-a7cbdc7ccf0e service nova] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Refreshing network info cache for port b635a257-729f-4428-9bb4-d56e3bb92bf2 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 871.674236] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:67:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f6d1427-d86b-4371-9172-50e4bb0eb1cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b635a257-729f-4428-9bb4-d56e3bb92bf2', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 871.682630] env[62522]: DEBUG oslo.service.loopingcall [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for 
function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 871.683649] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 871.683934] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-83f676fa-654f-4718-9a95-861322835ea0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.703549] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 871.703549] env[62522]: value = "task-2415594" [ 871.703549] env[62522]: _type = "Task" [ 871.703549] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.710852] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415594, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.771866] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c93b43ba-d0cf-4afd-9e58-08a8d457c113 tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Lock "a185273e-cdaf-4967-832b-f75014b7b3f4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.146s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.786180] env[62522]: DEBUG oslo_vmware.api [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415592, 'name': PowerOnVM_Task, 'duration_secs': 0.584384} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.786444] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 871.786642] env[62522]: INFO nova.compute.manager [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Took 8.03 seconds to spawn the instance on the hypervisor. 
[ 871.786819] env[62522]: DEBUG nova.compute.manager [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 871.788236] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27da1d6d-bdd8-4705-8b35-22f986a04fc3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.865415] env[62522]: DEBUG oslo_vmware.api [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415593, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.332791} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.865722] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 871.866526] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd96947-f5ef-4b65-9fb0-e7d2b021c542 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.890130] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Reconfiguring VM instance instance-00000038 to attach disk [datastore2] 6ef27aee-719c-4089-825d-fc117e867bde/6ef27aee-719c-4089-825d-fc117e867bde.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 871.890130] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dce473d3-e0fa-4f3f-9a98-14976a808cc2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.911850] env[62522]: DEBUG oslo_vmware.api [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 871.911850] env[62522]: value = "task-2415595" [ 871.911850] env[62522]: _type = "Task" [ 871.911850] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.920311] env[62522]: DEBUG oslo_vmware.api [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415595, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.213549] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415594, 'name': CreateVM_Task, 'duration_secs': 0.363089} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.215730] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 872.216450] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.216689] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.217020] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 872.217819] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be618538-8ea1-4ae9-9729-0f0ecf6a7e50 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.222294] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 872.222294] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527b1de2-94fc-a8eb-c31f-3f2ea94c93dc" [ 872.222294] env[62522]: _type = "Task" [ 872.222294] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.229670] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527b1de2-94fc-a8eb-c31f-3f2ea94c93dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.317193] env[62522]: INFO nova.compute.manager [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Took 45.71 seconds to build instance. [ 872.422840] env[62522]: DEBUG oslo_vmware.api [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415595, 'name': ReconfigVM_Task, 'duration_secs': 0.330392} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.423165] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Reconfigured VM instance instance-00000038 to attach disk [datastore2] 6ef27aee-719c-4089-825d-fc117e867bde/6ef27aee-719c-4089-825d-fc117e867bde.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 872.423798] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bbc5886c-f542-48bb-a154-0d7d982c60b7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.430030] env[62522]: DEBUG oslo_vmware.api [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 872.430030] env[62522]: value = "task-2415596" [ 872.430030] env[62522]: _type = "Task" [ 872.430030] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.437985] env[62522]: DEBUG oslo_vmware.api [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415596, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.447379] env[62522]: DEBUG nova.network.neutron [req-a9984587-88df-4d8b-8169-4d8aa9cde12d req-f046a380-e8c8-4253-9f26-a7cbdc7ccf0e service nova] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Updated VIF entry in instance network info cache for port b635a257-729f-4428-9bb4-d56e3bb92bf2. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 872.448141] env[62522]: DEBUG nova.network.neutron [req-a9984587-88df-4d8b-8169-4d8aa9cde12d req-f046a380-e8c8-4253-9f26-a7cbdc7ccf0e service nova] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Updating instance_info_cache with network_info: [{"id": "b635a257-729f-4428-9bb4-d56e3bb92bf2", "address": "fa:16:3e:45:67:f1", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.158", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb635a257-72", "ovs_interfaceid": "b635a257-729f-4428-9bb4-d56e3bb92bf2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.591337] env[62522]: DEBUG oslo_concurrency.lockutils [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "fcd0eef6-d059-4495-a982-058b6c9626d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.591563] env[62522]: DEBUG oslo_concurrency.lockutils [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "fcd0eef6-d059-4495-a982-058b6c9626d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.736628] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527b1de2-94fc-a8eb-c31f-3f2ea94c93dc, 'name': SearchDatastore_Task, 'duration_secs': 0.032822} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.736943] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.737255] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 872.737689] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.737882] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.738100] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 872.738542] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d9a65a81-4503-426f-b491-eb67e898dd39 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.752766] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 872.752766] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 872.753632] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3d1b8dc-2c2b-4e53-9f82-e7591af0e1a0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.762024] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 872.762024] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a2e60c-37d9-fefb-74f1-5e8852d04971" [ 872.762024] env[62522]: _type = "Task" [ 872.762024] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.770139] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a2e60c-37d9-fefb-74f1-5e8852d04971, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.777114] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae9d438-82a5-44fb-95fc-875465b6cb66 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.782613] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4c1a11d-ab03-416f-8639-534d74949379 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.815393] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7df2091e-2f24-4de6-96ad-22c773f13521 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.820719] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3a43f750-ff2b-4ab0-a221-5df5ec04167b tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "74e52638-d284-4bd1-8cff-c7aca9426f75" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.541s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.825747] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af1de79e-166a-40ce-a7cf-5a57ad3497e0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.841904] env[62522]: DEBUG nova.compute.provider_tree [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 872.941194] env[62522]: DEBUG oslo_vmware.api [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415596, 'name': 
Rename_Task, 'duration_secs': 0.157529} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.941979] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 872.942424] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-97511b2c-884e-431a-9f29-d16f49ca0882 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.949919] env[62522]: DEBUG oslo_vmware.api [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 872.949919] env[62522]: value = "task-2415597" [ 872.949919] env[62522]: _type = "Task" [ 872.949919] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.954901] env[62522]: DEBUG oslo_concurrency.lockutils [req-a9984587-88df-4d8b-8169-4d8aa9cde12d req-f046a380-e8c8-4253-9f26-a7cbdc7ccf0e service nova] Releasing lock "refresh_cache-ff6637e9-2a67-4302-9769-24ec045538d4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.961307] env[62522]: DEBUG oslo_vmware.api [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415597, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.272863] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a2e60c-37d9-fefb-74f1-5e8852d04971, 'name': SearchDatastore_Task, 'duration_secs': 0.022022} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.273682] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71193a25-7587-4727-a2ba-bd0bf8e10d26 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.279156] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 873.279156] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5284b3dc-2a6f-3798-53f5-e8197971b895" [ 873.279156] env[62522]: _type = "Task" [ 873.279156] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.288771] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5284b3dc-2a6f-3798-53f5-e8197971b895, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.331615] env[62522]: DEBUG nova.compute.manager [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 873.345138] env[62522]: DEBUG nova.scheduler.client.report [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 873.470191] env[62522]: DEBUG oslo_vmware.api [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415597, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.791549] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5284b3dc-2a6f-3798-53f5-e8197971b895, 'name': SearchDatastore_Task, 'duration_secs': 0.019561} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.791853] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.792130] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] ff6637e9-2a67-4302-9769-24ec045538d4/ff6637e9-2a67-4302-9769-24ec045538d4.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 873.792421] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9a7b8b52-2a91-4f23-8ee1-f1d3e4605397 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.800116] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 873.800116] env[62522]: value = "task-2415598" [ 873.800116] env[62522]: _type = "Task" [ 873.800116] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.806602] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415598, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.850594] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.609s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.851185] env[62522]: DEBUG nova.compute.manager [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 873.856319] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.729s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.860154] env[62522]: INFO nova.compute.claims [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 873.862562] env[62522]: DEBUG oslo_concurrency.lockutils [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.961381] env[62522]: DEBUG oslo_vmware.api [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415597, 'name': PowerOnVM_Task, 'duration_secs': 0.54539} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.961736] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 873.962110] env[62522]: INFO nova.compute.manager [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Took 7.59 seconds to spawn the instance on the hypervisor. [ 873.962383] env[62522]: DEBUG nova.compute.manager [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 873.963362] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f7274e0-92c6-441c-9e20-4fea7f6100da {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.981368] env[62522]: DEBUG nova.compute.manager [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Stashing vm_state: active {{(pid=62522) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 874.311182] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415598, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.369103] env[62522]: DEBUG nova.compute.utils [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 874.369103] env[62522]: DEBUG nova.compute.manager [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 874.369103] env[62522]: DEBUG nova.network.neutron [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 874.417948] env[62522]: DEBUG nova.policy [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cf02455354954275b86bee37d357f071', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0dae444f2b5845aa9264fea1f237f0e3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 874.495629] env[62522]: INFO nova.compute.manager [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Took 45.45 seconds to build instance. [ 874.517257] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.768855] env[62522]: DEBUG nova.network.neutron [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Successfully created port: 40bd7b1c-a8fa-4e59-802e-a8392e0d30eb {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 874.811092] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415598, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.579934} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.811589] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] ff6637e9-2a67-4302-9769-24ec045538d4/ff6637e9-2a67-4302-9769-24ec045538d4.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 874.811805] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 874.812080] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2c5da534-58ca-4f6b-93bb-2e3b0ab51aa7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.819016] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 874.819016] env[62522]: value = "task-2415599" [ 874.819016] env[62522]: _type = "Task" [ 874.819016] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.830706] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415599, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.871328] env[62522]: DEBUG nova.compute.utils [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 875.000504] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4e1414f0-f7db-4b53-9b40-fd7e28c94a97 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lock "6ef27aee-719c-4089-825d-fc117e867bde" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.266s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.101819] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Acquiring lock "17ec01e7-9735-4771-a73c-c4c7634d59f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.102067] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Lock "17ec01e7-9735-4771-a73c-c4c7634d59f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.330130] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415599, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.351971] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d17d56-208e-489d-a09f-b8fd08190f0d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.360160] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69445833-5903-4f18-9c0d-d6e015ea4cf6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.396598] env[62522]: DEBUG nova.compute.manager [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 875.400360] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83dfabf6-026f-4451-ac3f-3d0cdd5fb8fd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.408323] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dae562c-2bec-454c-a363-9290ec97ed28 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.421589] env[62522]: DEBUG nova.compute.provider_tree [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 875.503706] env[62522]: DEBUG nova.compute.manager [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 875.831787] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415599, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.518955} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.831787] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 875.831787] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c9f5dab-3ed9-42c5-8408-d2bb71dcffbd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.853181] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] ff6637e9-2a67-4302-9769-24ec045538d4/ff6637e9-2a67-4302-9769-24ec045538d4.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 875.853489] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67cdd6b8-ba83-4601-b6e0-7679e24d3eed {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.874869] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 875.874869] env[62522]: value = "task-2415600" [ 875.874869] env[62522]: _type = "Task" [ 875.874869] env[62522]: } 
to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.883585] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415600, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.926038] env[62522]: DEBUG nova.scheduler.client.report [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 876.104480] env[62522]: DEBUG oslo_concurrency.lockutils [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 876.289108] env[62522]: DEBUG nova.network.neutron [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Successfully updated port: 40bd7b1c-a8fa-4e59-802e-a8392e0d30eb {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 876.315755] env[62522]: DEBUG nova.compute.manager [req-fcd6599d-d06f-4a3a-a1e2-7e5f801c9435 req-bcf18864-ae9d-43e6-976e-1cbac2e42c23 service nova] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Received event network-vif-plugged-40bd7b1c-a8fa-4e59-802e-a8392e0d30eb {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 876.315989] env[62522]: DEBUG oslo_concurrency.lockutils [req-fcd6599d-d06f-4a3a-a1e2-7e5f801c9435 req-bcf18864-ae9d-43e6-976e-1cbac2e42c23 service nova] Acquiring lock "9141ffdd-cbfa-4efe-a01b-dc1326af474c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 876.316239] env[62522]: DEBUG oslo_concurrency.lockutils [req-fcd6599d-d06f-4a3a-a1e2-7e5f801c9435 req-bcf18864-ae9d-43e6-976e-1cbac2e42c23 service nova] Lock "9141ffdd-cbfa-4efe-a01b-dc1326af474c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 876.316413] env[62522]: DEBUG oslo_concurrency.lockutils [req-fcd6599d-d06f-4a3a-a1e2-7e5f801c9435 req-bcf18864-ae9d-43e6-976e-1cbac2e42c23 service nova] Lock "9141ffdd-cbfa-4efe-a01b-dc1326af474c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.317099] env[62522]: DEBUG nova.compute.manager [req-fcd6599d-d06f-4a3a-a1e2-7e5f801c9435 req-bcf18864-ae9d-43e6-976e-1cbac2e42c23 service nova] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] No waiting events found dispatching network-vif-plugged-40bd7b1c-a8fa-4e59-802e-a8392e0d30eb {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 876.317099] env[62522]: WARNING nova.compute.manager [req-fcd6599d-d06f-4a3a-a1e2-7e5f801c9435 req-bcf18864-ae9d-43e6-976e-1cbac2e42c23 service nova] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Received unexpected event network-vif-plugged-40bd7b1c-a8fa-4e59-802e-a8392e0d30eb for instance with vm_state building and task_state spawning. [ 876.385879] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415600, 'name': ReconfigVM_Task, 'duration_secs': 0.29653} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.386210] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Reconfigured VM instance instance-00000039 to attach disk [datastore1] ff6637e9-2a67-4302-9769-24ec045538d4/ff6637e9-2a67-4302-9769-24ec045538d4.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 876.386825] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-32532adf-fdab-443e-8094-f4f049513bd7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.394263] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 876.394263] env[62522]: value = "task-2415601" [ 876.394263] env[62522]: _type = "Task" [ 876.394263] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.403794] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415601, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.410138] env[62522]: DEBUG nova.compute.manager [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 876.429999] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.576s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.430570] env[62522]: DEBUG nova.compute.manager [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 876.433523] env[62522]: DEBUG oslo_concurrency.lockutils [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.618s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 876.435056] env[62522]: INFO nova.compute.claims [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 876.440298] env[62522]: DEBUG nova.virt.hardware [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:18:56Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='597994831',id=21,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-891273421',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 876.440575] env[62522]: DEBUG nova.virt.hardware [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 876.440785] env[62522]: DEBUG nova.virt.hardware [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 876.441038] env[62522]: DEBUG nova.virt.hardware [None 
req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 876.441210] env[62522]: DEBUG nova.virt.hardware [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 876.441361] env[62522]: DEBUG nova.virt.hardware [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 876.441571] env[62522]: DEBUG nova.virt.hardware [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 876.441733] env[62522]: DEBUG nova.virt.hardware [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 876.441899] env[62522]: DEBUG nova.virt.hardware [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 876.442121] env[62522]: DEBUG nova.virt.hardware [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 876.442311] env[62522]: DEBUG nova.virt.hardware [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 876.443437] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e1a762-8bd6-4a19-88a7-26a83b524abc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.453515] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fbef074-acb0-4297-a8f6-18d681e27694 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.792938] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 
tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Acquiring lock "refresh_cache-9141ffdd-cbfa-4efe-a01b-dc1326af474c" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 876.792938] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Acquired lock "refresh_cache-9141ffdd-cbfa-4efe-a01b-dc1326af474c" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.793141] env[62522]: DEBUG nova.network.neutron [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 876.903805] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415601, 'name': Rename_Task, 'duration_secs': 0.148595} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.904101] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 876.904340] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-22efb75c-92bb-4b92-a033-a3555936d270 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.911600] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 876.911600] env[62522]: value = "task-2415602" [ 876.911600] env[62522]: _type = "Task" [ 876.911600] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.924355] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415602, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.944679] env[62522]: DEBUG nova.compute.utils [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 876.946203] env[62522]: DEBUG nova.compute.manager [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 876.946203] env[62522]: DEBUG nova.network.neutron [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 876.995314] env[62522]: DEBUG nova.policy [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0b81d399f06a47bc819693b52bb74004', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ff5da278d2be4ca983424c8291beadec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 877.257212] env[62522]: DEBUG nova.network.neutron [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Successfully created port: 15fdf0ad-85f9-4e6d-ace5-5a462f025e45 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 877.346948] env[62522]: DEBUG nova.network.neutron [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 877.424562] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415602, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.450261] env[62522]: DEBUG nova.compute.manager [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 877.562573] env[62522]: DEBUG nova.network.neutron [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Updating instance_info_cache with network_info: [{"id": "40bd7b1c-a8fa-4e59-802e-a8392e0d30eb", "address": "fa:16:3e:13:c0:8d", "network": {"id": "2d13676e-cb66-4166-b85f-b3c122b27f67", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1154652116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dae444f2b5845aa9264fea1f237f0e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40bd7b1c-a8", "ovs_interfaceid": "40bd7b1c-a8fa-4e59-802e-a8392e0d30eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.927405] env[62522]: DEBUG oslo_vmware.api [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415602, 'name': PowerOnVM_Task, 'duration_secs': 0.99202} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.930415] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 877.930625] env[62522]: INFO nova.compute.manager [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Took 8.96 seconds to spawn the instance on the hypervisor. 
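The Rename_Task and PowerOnVM_Task entries above follow oslo.vmware's invoke-then-poll pattern: the driver submits an asynchronous vSphere task through the shared VMwareAPISession, then blocks in wait_for_task() while _poll_task reports progress (0% ... 66% ... completed successfully). A minimal sketch of that pattern, assuming placeholder vCenter credentials and an already-resolved VirtualMachine managed object reference (vm_ref) rather than anything taken from this log, might look like:

from oslo_vmware import api

# Placeholder connection details -- substitute a real vCenter host and account.
session = api.VMwareAPISession(
    'vc.example.org',                 # hypothetical vCenter host, not the one in this log
    'administrator@vsphere.local',    # hypothetical user
    'secret',                         # hypothetical password
    api_retry_count=10,               # retries for transient API failures
    task_poll_interval=0.5)           # seconds between task-status polls

def power_on(session, vm_ref):
    # Submit the asynchronous vSphere task; this corresponds to the
    # "Invoking VirtualMachine.PowerOnVM_Task" lines in the log.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # Block until the task finishes; wait_for_task polls and logs progress,
    # matching the _poll_task / wait_for_task entries seen above.
    return session.wait_for_task(task)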
[ 877.930806] env[62522]: DEBUG nova.compute.manager [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 877.931767] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c99bd785-4e5e-4fb5-aec1-b4840338cc9c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.998480] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1380ccc-8770-4f43-9d18-a6c16385e34b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.006368] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b327fa6-55c7-4e2d-b8ab-5fc5663934c3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.042488] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e7c5cd3-2692-4ae5-993c-0ce569907f74 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.050071] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3360533-3070-46b0-bdc5-50e12bcabec9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.063385] env[62522]: DEBUG nova.compute.provider_tree [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 878.067240] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Releasing lock "refresh_cache-9141ffdd-cbfa-4efe-a01b-dc1326af474c" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.067677] env[62522]: DEBUG nova.compute.manager [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Instance network_info: |[{"id": "40bd7b1c-a8fa-4e59-802e-a8392e0d30eb", "address": "fa:16:3e:13:c0:8d", "network": {"id": "2d13676e-cb66-4166-b85f-b3c122b27f67", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1154652116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dae444f2b5845aa9264fea1f237f0e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40bd7b1c-a8", "ovs_interfaceid": "40bd7b1c-a8fa-4e59-802e-a8392e0d30eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 878.067869] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:c0:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa09e855-8af1-419b-b78d-8ffcc94b1bfb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '40bd7b1c-a8fa-4e59-802e-a8392e0d30eb', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 878.075489] env[62522]: DEBUG oslo.service.loopingcall [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 878.076290] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 878.076506] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-99670160-3493-4fbb-96d1-bbb935bf64dc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.098494] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 878.098494] env[62522]: value = "task-2415603" [ 878.098494] env[62522]: _type = "Task" [ 878.098494] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.109616] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415603, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.342764] env[62522]: DEBUG nova.compute.manager [req-f34174a0-b086-4e2d-ac1c-212489f67dec req-76a773d1-4c7b-4bb4-88d3-9de77e64f885 service nova] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Received event network-changed-40bd7b1c-a8fa-4e59-802e-a8392e0d30eb {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 878.342934] env[62522]: DEBUG nova.compute.manager [req-f34174a0-b086-4e2d-ac1c-212489f67dec req-76a773d1-4c7b-4bb4-88d3-9de77e64f885 service nova] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Refreshing instance network info cache due to event network-changed-40bd7b1c-a8fa-4e59-802e-a8392e0d30eb. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 878.343166] env[62522]: DEBUG oslo_concurrency.lockutils [req-f34174a0-b086-4e2d-ac1c-212489f67dec req-76a773d1-4c7b-4bb4-88d3-9de77e64f885 service nova] Acquiring lock "refresh_cache-9141ffdd-cbfa-4efe-a01b-dc1326af474c" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.343301] env[62522]: DEBUG oslo_concurrency.lockutils [req-f34174a0-b086-4e2d-ac1c-212489f67dec req-76a773d1-4c7b-4bb4-88d3-9de77e64f885 service nova] Acquired lock "refresh_cache-9141ffdd-cbfa-4efe-a01b-dc1326af474c" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.343460] env[62522]: DEBUG nova.network.neutron [req-f34174a0-b086-4e2d-ac1c-212489f67dec req-76a773d1-4c7b-4bb4-88d3-9de77e64f885 service nova] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Refreshing network info cache for port 40bd7b1c-a8fa-4e59-802e-a8392e0d30eb {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 878.453223] env[62522]: INFO nova.compute.manager [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Took 47.78 seconds to build instance. [ 878.458965] env[62522]: DEBUG nova.compute.manager [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 878.484400] env[62522]: DEBUG nova.virt.hardware [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 878.484646] env[62522]: DEBUG nova.virt.hardware [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 878.484802] env[62522]: DEBUG nova.virt.hardware [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 878.484989] env[62522]: DEBUG nova.virt.hardware [None 
req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 878.485151] env[62522]: DEBUG nova.virt.hardware [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 878.485297] env[62522]: DEBUG nova.virt.hardware [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 878.485503] env[62522]: DEBUG nova.virt.hardware [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 878.485691] env[62522]: DEBUG nova.virt.hardware [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 878.485883] env[62522]: DEBUG nova.virt.hardware [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 878.486063] env[62522]: DEBUG nova.virt.hardware [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 878.486239] env[62522]: DEBUG nova.virt.hardware [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 878.488594] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abade1ec-e7f3-4786-819e-6c07848a8c73 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.495931] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef104027-e2ab-4c2d-bea5-dd7986f9de4b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.567310] env[62522]: DEBUG nova.scheduler.client.report [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory 
data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 878.608648] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415603, 'name': CreateVM_Task, 'duration_secs': 0.307902} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.608824] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 878.609528] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.609694] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.610055] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 878.610314] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63af04d2-f2ed-483e-9b15-e6640182a0d2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.615420] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for the task: (returnval){ [ 878.615420] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529529c5-d46f-bc64-f6a4-3342ef13fae7" [ 878.615420] env[62522]: _type = "Task" [ 878.615420] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.623437] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529529c5-d46f-bc64-f6a4-3342ef13fae7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.783362] env[62522]: DEBUG nova.network.neutron [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Successfully updated port: 15fdf0ad-85f9-4e6d-ace5-5a462f025e45 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 878.954827] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccb0181a-5fb6-47f3-8f8c-36fb1f26a821 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lock "ff6637e9-2a67-4302-9769-24ec045538d4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.798s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.063483] env[62522]: DEBUG nova.network.neutron [req-f34174a0-b086-4e2d-ac1c-212489f67dec req-76a773d1-4c7b-4bb4-88d3-9de77e64f885 service nova] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Updated VIF entry in instance network info cache for port 40bd7b1c-a8fa-4e59-802e-a8392e0d30eb. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 879.063978] env[62522]: DEBUG nova.network.neutron [req-f34174a0-b086-4e2d-ac1c-212489f67dec req-76a773d1-4c7b-4bb4-88d3-9de77e64f885 service nova] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Updating instance_info_cache with network_info: [{"id": "40bd7b1c-a8fa-4e59-802e-a8392e0d30eb", "address": "fa:16:3e:13:c0:8d", "network": {"id": "2d13676e-cb66-4166-b85f-b3c122b27f67", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1154652116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dae444f2b5845aa9264fea1f237f0e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40bd7b1c-a8", "ovs_interfaceid": "40bd7b1c-a8fa-4e59-802e-a8392e0d30eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.074062] env[62522]: DEBUG oslo_concurrency.lockutils [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.641s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.074603] env[62522]: DEBUG nova.compute.manager [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: 
c28d2907-5b59-4df8-91a8-4ba0f2047d89] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 879.078410] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.078665] env[62522]: DEBUG nova.objects.instance [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Lazy-loading 'resources' on Instance uuid 95e4fe36-6830-4fc4-bb53-1e5643c2f95b {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 879.130631] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529529c5-d46f-bc64-f6a4-3342ef13fae7, 'name': SearchDatastore_Task, 'duration_secs': 0.015279} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.130938] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.131190] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 879.131415] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.132019] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.132019] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 879.132660] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4b5a0e62-0a8e-4d16-aa27-19135b18a83b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.142969] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 879.142969] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 879.143685] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b42c8d4-0c36-4ed1-9e95-067aadf04e8e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.150424] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for the task: (returnval){ [ 879.150424] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5218860f-73f0-ceba-b696-59125677a18c" [ 879.150424] env[62522]: _type = "Task" [ 879.150424] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.160215] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5218860f-73f0-ceba-b696-59125677a18c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.287121] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "refresh_cache-fe1f5581-0dec-41e5-a450-c3de5a573602" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.287331] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired lock "refresh_cache-fe1f5581-0dec-41e5-a450-c3de5a573602" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.287494] env[62522]: DEBUG nova.network.neutron [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 879.457546] env[62522]: DEBUG nova.compute.manager [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 879.566879] env[62522]: DEBUG oslo_concurrency.lockutils [req-f34174a0-b086-4e2d-ac1c-212489f67dec req-76a773d1-4c7b-4bb4-88d3-9de77e64f885 service nova] Releasing lock "refresh_cache-9141ffdd-cbfa-4efe-a01b-dc1326af474c" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.579652] env[62522]: DEBUG nova.compute.utils [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 879.581051] env[62522]: DEBUG nova.compute.manager [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 879.581233] env[62522]: DEBUG nova.network.neutron [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 879.633464] env[62522]: DEBUG nova.policy [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '30322764deb64be28fcba5630b7240d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f93394feaa4f4b61a5d3d670d32ec599', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 879.667143] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5218860f-73f0-ceba-b696-59125677a18c, 'name': SearchDatastore_Task, 'duration_secs': 0.015981} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.668036] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d42bb5c8-fee8-4646-8ec1-58a7fc50a9bd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.678329] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for the task: (returnval){ [ 879.678329] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529566bc-307c-3a09-4233-29761012c626" [ 879.678329] env[62522]: _type = "Task" [ 879.678329] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.686415] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529566bc-307c-3a09-4233-29761012c626, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.832808] env[62522]: DEBUG nova.network.neutron [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 879.949465] env[62522]: DEBUG nova.network.neutron [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Successfully created port: 931dfe44-9ac3-4df4-a4ea-6c8612389451 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 879.983940] env[62522]: DEBUG oslo_concurrency.lockutils [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.995964] env[62522]: DEBUG nova.network.neutron [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Updating instance_info_cache with network_info: [{"id": "15fdf0ad-85f9-4e6d-ace5-5a462f025e45", "address": "fa:16:3e:e0:f1:2d", "network": {"id": "70e81afa-0eda-49c5-b072-e79b3c287468", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1715169983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff5da278d2be4ca983424c8291beadec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15fdf0ad-85", "ovs_interfaceid": "15fdf0ad-85f9-4e6d-ace5-5a462f025e45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.089108] env[62522]: DEBUG nova.compute.manager [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 880.119054] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1939a983-55f8-4420-bd0d-9dd3c64e03ba {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.129184] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f45156-6602-4072-a1a2-cee7e395e499 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.131887] env[62522]: DEBUG nova.compute.manager [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 880.132653] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d7a250d-082f-4a36-b632-e7e4c48d1e7a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.166862] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641f202f-5de9-4ab1-b2a1-d3adadc04637 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.175022] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c437d0e-2cc7-482d-b08e-6073e276e495 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.194227] env[62522]: DEBUG nova.compute.provider_tree [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.198858] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529566bc-307c-3a09-4233-29761012c626, 'name': SearchDatastore_Task, 'duration_secs': 0.011784} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.199274] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.199530] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 9141ffdd-cbfa-4efe-a01b-dc1326af474c/9141ffdd-cbfa-4efe-a01b-dc1326af474c.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 880.199773] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3fdd107c-aa16-492f-8c3e-2b626072963c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.206754] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for the task: (returnval){ [ 880.206754] env[62522]: value = "task-2415604" [ 880.206754] env[62522]: _type = "Task" [ 880.206754] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.214933] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415604, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.429685] env[62522]: DEBUG nova.compute.manager [req-7df48cb7-019b-4ff7-8a22-1578ab93ec51 req-4ca0c020-c346-4e5c-b843-93774a59977e service nova] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Received event network-vif-plugged-15fdf0ad-85f9-4e6d-ace5-5a462f025e45 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 880.429972] env[62522]: DEBUG oslo_concurrency.lockutils [req-7df48cb7-019b-4ff7-8a22-1578ab93ec51 req-4ca0c020-c346-4e5c-b843-93774a59977e service nova] Acquiring lock "fe1f5581-0dec-41e5-a450-c3de5a573602-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.430182] env[62522]: DEBUG oslo_concurrency.lockutils [req-7df48cb7-019b-4ff7-8a22-1578ab93ec51 req-4ca0c020-c346-4e5c-b843-93774a59977e service nova] Lock "fe1f5581-0dec-41e5-a450-c3de5a573602-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.430461] env[62522]: DEBUG oslo_concurrency.lockutils [req-7df48cb7-019b-4ff7-8a22-1578ab93ec51 req-4ca0c020-c346-4e5c-b843-93774a59977e service nova] Lock "fe1f5581-0dec-41e5-a450-c3de5a573602-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.430542] env[62522]: DEBUG nova.compute.manager [req-7df48cb7-019b-4ff7-8a22-1578ab93ec51 req-4ca0c020-c346-4e5c-b843-93774a59977e service nova] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] No waiting events found dispatching network-vif-plugged-15fdf0ad-85f9-4e6d-ace5-5a462f025e45 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 880.430683] env[62522]: WARNING nova.compute.manager [req-7df48cb7-019b-4ff7-8a22-1578ab93ec51 req-4ca0c020-c346-4e5c-b843-93774a59977e service nova] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Received unexpected event network-vif-plugged-15fdf0ad-85f9-4e6d-ace5-5a462f025e45 for instance with vm_state building and task_state spawning. [ 880.430879] env[62522]: DEBUG nova.compute.manager [req-7df48cb7-019b-4ff7-8a22-1578ab93ec51 req-4ca0c020-c346-4e5c-b843-93774a59977e service nova] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Received event network-changed-15fdf0ad-85f9-4e6d-ace5-5a462f025e45 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 880.431097] env[62522]: DEBUG nova.compute.manager [req-7df48cb7-019b-4ff7-8a22-1578ab93ec51 req-4ca0c020-c346-4e5c-b843-93774a59977e service nova] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Refreshing instance network info cache due to event network-changed-15fdf0ad-85f9-4e6d-ace5-5a462f025e45. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 880.431373] env[62522]: DEBUG oslo_concurrency.lockutils [req-7df48cb7-019b-4ff7-8a22-1578ab93ec51 req-4ca0c020-c346-4e5c-b843-93774a59977e service nova] Acquiring lock "refresh_cache-fe1f5581-0dec-41e5-a450-c3de5a573602" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.498466] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Releasing lock "refresh_cache-fe1f5581-0dec-41e5-a450-c3de5a573602" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.498808] env[62522]: DEBUG nova.compute.manager [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Instance network_info: |[{"id": "15fdf0ad-85f9-4e6d-ace5-5a462f025e45", "address": "fa:16:3e:e0:f1:2d", "network": {"id": "70e81afa-0eda-49c5-b072-e79b3c287468", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1715169983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff5da278d2be4ca983424c8291beadec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15fdf0ad-85", "ovs_interfaceid": "15fdf0ad-85f9-4e6d-ace5-5a462f025e45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 880.499215] env[62522]: DEBUG oslo_concurrency.lockutils [req-7df48cb7-019b-4ff7-8a22-1578ab93ec51 req-4ca0c020-c346-4e5c-b843-93774a59977e service nova] Acquired lock "refresh_cache-fe1f5581-0dec-41e5-a450-c3de5a573602" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.499409] env[62522]: DEBUG nova.network.neutron [req-7df48cb7-019b-4ff7-8a22-1578ab93ec51 req-4ca0c020-c346-4e5c-b843-93774a59977e service nova] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Refreshing network info cache for port 15fdf0ad-85f9-4e6d-ace5-5a462f025e45 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 880.500722] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:f1:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7654928b-7afe-42e3-a18d-68ecc775cefe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'15fdf0ad-85f9-4e6d-ace5-5a462f025e45', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 880.508251] env[62522]: DEBUG oslo.service.loopingcall [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 880.508782] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 880.509584] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-95808014-f6c5-47d0-bf87-9a336e032fa7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.530463] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 880.530463] env[62522]: value = "task-2415605" [ 880.530463] env[62522]: _type = "Task" [ 880.530463] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.540577] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415605, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.669095] env[62522]: INFO nova.compute.manager [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] instance snapshotting [ 880.674050] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47028a3d-88bd-4053-8bfb-9efedbd9457d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.692018] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf52eec-f7aa-4409-ae03-579f0edae248 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.701991] env[62522]: DEBUG nova.scheduler.client.report [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 880.716681] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415604, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.462173} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.716681] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 9141ffdd-cbfa-4efe-a01b-dc1326af474c/9141ffdd-cbfa-4efe-a01b-dc1326af474c.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 880.716885] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 880.717027] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-03ac0da5-7786-497a-b14e-f099a4a9990a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.725435] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for the task: (returnval){ [ 880.725435] env[62522]: value = "task-2415606" [ 880.725435] env[62522]: _type = "Task" [ 880.725435] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.734171] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415606, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.042617] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415605, 'name': CreateVM_Task} progress is 25%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.099421] env[62522]: DEBUG nova.compute.manager [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 881.122919] env[62522]: DEBUG nova.virt.hardware [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 881.123230] env[62522]: DEBUG nova.virt.hardware [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 881.123398] env[62522]: DEBUG nova.virt.hardware [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 881.123584] env[62522]: DEBUG nova.virt.hardware [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 881.123733] env[62522]: DEBUG nova.virt.hardware [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 881.123880] env[62522]: DEBUG nova.virt.hardware [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 881.124118] env[62522]: DEBUG nova.virt.hardware [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 881.124343] env[62522]: DEBUG nova.virt.hardware [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
881.124528] env[62522]: DEBUG nova.virt.hardware [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 881.124694] env[62522]: DEBUG nova.virt.hardware [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 881.124860] env[62522]: DEBUG nova.virt.hardware [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 881.125729] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe80d69-1245-4777-9daa-ac76bac0c1cb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.135359] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c9ea11-1b93-40f5-8571-846009da167f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.208039] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.130s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.210397] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.185s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.210637] env[62522]: DEBUG nova.objects.instance [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Lazy-loading 'resources' on Instance uuid 76cb551e-e605-4c80-a6ef-e36681fc0bc2 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 881.212673] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Creating Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 881.213161] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4b4d0abf-9abd-4985-b450-211d84dbe9b1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.220641] env[62522]: DEBUG oslo_vmware.api [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd 
tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 881.220641] env[62522]: value = "task-2415607" [ 881.220641] env[62522]: _type = "Task" [ 881.220641] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.231094] env[62522]: DEBUG oslo_vmware.api [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415607, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.234699] env[62522]: INFO nova.scheduler.client.report [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Deleted allocations for instance 95e4fe36-6830-4fc4-bb53-1e5643c2f95b [ 881.238453] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415606, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064029} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.240954] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 881.242177] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b68256bd-13bf-4bc7-ad62-ead528734cd7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.265131] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] 9141ffdd-cbfa-4efe-a01b-dc1326af474c/9141ffdd-cbfa-4efe-a01b-dc1326af474c.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 881.265791] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d671f375-6ffe-4f42-a604-9ec3c74214de {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.282260] env[62522]: DEBUG nova.network.neutron [req-7df48cb7-019b-4ff7-8a22-1578ab93ec51 req-4ca0c020-c346-4e5c-b843-93774a59977e service nova] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Updated VIF entry in instance network info cache for port 15fdf0ad-85f9-4e6d-ace5-5a462f025e45. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 881.283026] env[62522]: DEBUG nova.network.neutron [req-7df48cb7-019b-4ff7-8a22-1578ab93ec51 req-4ca0c020-c346-4e5c-b843-93774a59977e service nova] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Updating instance_info_cache with network_info: [{"id": "15fdf0ad-85f9-4e6d-ace5-5a462f025e45", "address": "fa:16:3e:e0:f1:2d", "network": {"id": "70e81afa-0eda-49c5-b072-e79b3c287468", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1715169983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff5da278d2be4ca983424c8291beadec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15fdf0ad-85", "ovs_interfaceid": "15fdf0ad-85f9-4e6d-ace5-5a462f025e45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.290034] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for the task: (returnval){ [ 881.290034] env[62522]: value = "task-2415608" [ 881.290034] env[62522]: _type = "Task" [ 881.290034] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.297442] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415608, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.471858] env[62522]: DEBUG nova.network.neutron [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Successfully updated port: 931dfe44-9ac3-4df4-a4ea-6c8612389451 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 881.541910] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415605, 'name': CreateVM_Task, 'duration_secs': 0.70332} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.542109] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 881.543082] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.543283] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.543609] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 881.543868] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13d79711-86ec-4bdc-bcdc-fbc16e0a8eba {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.548741] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 881.548741] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52530064-c948-4a3f-020b-bbcc80791092" [ 881.548741] env[62522]: _type = "Task" [ 881.548741] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.556561] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52530064-c948-4a3f-020b-bbcc80791092, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.731088] env[62522]: DEBUG oslo_vmware.api [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415607, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.744853] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2d2ba3fe-a9c7-4a55-b0b9-8640025994ab tempest-ServersTestJSON-2066055632 tempest-ServersTestJSON-2066055632-project-member] Lock "95e4fe36-6830-4fc4-bb53-1e5643c2f95b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.651s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.785334] env[62522]: DEBUG oslo_concurrency.lockutils [req-7df48cb7-019b-4ff7-8a22-1578ab93ec51 req-4ca0c020-c346-4e5c-b843-93774a59977e service nova] Releasing lock "refresh_cache-fe1f5581-0dec-41e5-a450-c3de5a573602" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.803399] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415608, 'name': ReconfigVM_Task, 'duration_secs': 0.295147} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.805931] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Reconfigured VM instance instance-0000003a to attach disk [datastore1] 9141ffdd-cbfa-4efe-a01b-dc1326af474c/9141ffdd-cbfa-4efe-a01b-dc1326af474c.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 881.806247] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=62522) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 881.807787] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-93ca3678-8fdb-43cb-b028-5ec356d51516 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.813774] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for the task: (returnval){ [ 881.813774] env[62522]: value = "task-2415609" [ 881.813774] env[62522]: _type = "Task" [ 881.813774] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.824530] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415609, 'name': CreateVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.977718] env[62522]: DEBUG oslo_concurrency.lockutils [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "refresh_cache-c28d2907-5b59-4df8-91a8-4ba0f2047d89" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.977718] env[62522]: DEBUG oslo_concurrency.lockutils [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquired lock "refresh_cache-c28d2907-5b59-4df8-91a8-4ba0f2047d89" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.977718] env[62522]: DEBUG nova.network.neutron [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 882.063252] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52530064-c948-4a3f-020b-bbcc80791092, 'name': SearchDatastore_Task, 'duration_secs': 0.008763} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.063620] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.064147] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 882.064147] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.064273] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.064464] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a 
tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 882.064866] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82abfae2-c4d8-4cab-a1ad-a2a7040323b6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.078066] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 882.078066] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 882.079056] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-767f03e2-24a4-414a-befa-47ea0adea836 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.088128] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 882.088128] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525e0e84-2d05-c905-37f8-a058e8a66f82" [ 882.088128] env[62522]: _type = "Task" [ 882.088128] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.097174] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525e0e84-2d05-c905-37f8-a058e8a66f82, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.207931] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d6b4a7-ae46-4cc9-a5e0-da6ba5439d0e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.215521] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f7a2811-661c-47cf-a57c-19b84f29cc22 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.248695] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa76b2e7-21d7-4402-b58f-411de31ec4a5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.260229] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-497ed7bb-48c4-454c-9540-6b449dbfc501 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.264041] env[62522]: DEBUG oslo_vmware.api [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415607, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.275934] env[62522]: DEBUG nova.compute.provider_tree [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 882.323542] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415609, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.048151} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.323796] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=62522) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 882.324576] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bee7c1c0-e4dd-4538-84cd-5fce66beca8a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.349328] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] 9141ffdd-cbfa-4efe-a01b-dc1326af474c/ephemeral_0.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 882.349611] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0de4030e-2702-4921-b461-0f1767a09ec3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.374590] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for the task: (returnval){ [ 882.374590] env[62522]: value = "task-2415610" [ 882.374590] env[62522]: _type = "Task" [ 882.374590] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.383534] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415610, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.526357] env[62522]: DEBUG nova.compute.manager [req-e47ace33-b1a5-495a-819a-f21d718a38ae req-77acc990-271f-43c6-9736-fe36d192ac2b service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Received event network-vif-plugged-931dfe44-9ac3-4df4-a4ea-6c8612389451 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 882.526584] env[62522]: DEBUG oslo_concurrency.lockutils [req-e47ace33-b1a5-495a-819a-f21d718a38ae req-77acc990-271f-43c6-9736-fe36d192ac2b service nova] Acquiring lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.526789] env[62522]: DEBUG oslo_concurrency.lockutils [req-e47ace33-b1a5-495a-819a-f21d718a38ae req-77acc990-271f-43c6-9736-fe36d192ac2b service nova] Lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.526997] env[62522]: DEBUG oslo_concurrency.lockutils [req-e47ace33-b1a5-495a-819a-f21d718a38ae req-77acc990-271f-43c6-9736-fe36d192ac2b service nova] Lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.527138] env[62522]: DEBUG nova.compute.manager [req-e47ace33-b1a5-495a-819a-f21d718a38ae req-77acc990-271f-43c6-9736-fe36d192ac2b service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] No waiting events found dispatching network-vif-plugged-931dfe44-9ac3-4df4-a4ea-6c8612389451 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 882.527325] env[62522]: WARNING nova.compute.manager [req-e47ace33-b1a5-495a-819a-f21d718a38ae req-77acc990-271f-43c6-9736-fe36d192ac2b service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Received unexpected event network-vif-plugged-931dfe44-9ac3-4df4-a4ea-6c8612389451 for instance with vm_state building and task_state spawning. [ 882.527496] env[62522]: DEBUG nova.compute.manager [req-e47ace33-b1a5-495a-819a-f21d718a38ae req-77acc990-271f-43c6-9736-fe36d192ac2b service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Received event network-changed-931dfe44-9ac3-4df4-a4ea-6c8612389451 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 882.527591] env[62522]: DEBUG nova.compute.manager [req-e47ace33-b1a5-495a-819a-f21d718a38ae req-77acc990-271f-43c6-9736-fe36d192ac2b service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Refreshing instance network info cache due to event network-changed-931dfe44-9ac3-4df4-a4ea-6c8612389451. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 882.527779] env[62522]: DEBUG oslo_concurrency.lockutils [req-e47ace33-b1a5-495a-819a-f21d718a38ae req-77acc990-271f-43c6-9736-fe36d192ac2b service nova] Acquiring lock "refresh_cache-c28d2907-5b59-4df8-91a8-4ba0f2047d89" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.529362] env[62522]: DEBUG nova.network.neutron [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 882.603096] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525e0e84-2d05-c905-37f8-a058e8a66f82, 'name': SearchDatastore_Task, 'duration_secs': 0.009976} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.603897] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-422cc863-354a-46a9-bb10-5a43e7bb8181 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.612600] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 882.612600] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b5d207-2e47-937b-eadf-135b8f7488df" [ 882.612600] env[62522]: _type = "Task" [ 882.612600] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.621493] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b5d207-2e47-937b-eadf-135b8f7488df, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.621736] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.622017] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] fe1f5581-0dec-41e5-a450-c3de5a573602/fe1f5581-0dec-41e5-a450-c3de5a573602.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 882.622321] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f3394a8f-698f-48e6-b164-e8546743de08 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.628302] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 882.628302] env[62522]: value = "task-2415611" [ 882.628302] env[62522]: _type = "Task" [ 882.628302] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.638957] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415611, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.754520] env[62522]: DEBUG oslo_vmware.api [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415607, 'name': CreateSnapshot_Task, 'duration_secs': 1.231513} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.754783] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Created Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 882.755643] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b59c9a72-40b7-4de5-8ab6-4fe1dde5bab7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.761288] env[62522]: DEBUG nova.network.neutron [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Updating instance_info_cache with network_info: [{"id": "931dfe44-9ac3-4df4-a4ea-6c8612389451", "address": "fa:16:3e:f5:6d:32", "network": {"id": "949f3536-8a7e-4edf-b6cc-6a264fe5fe83", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1891232839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f93394feaa4f4b61a5d3d670d32ec599", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap931dfe44-9a", "ovs_interfaceid": "931dfe44-9ac3-4df4-a4ea-6c8612389451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.801121] env[62522]: ERROR nova.scheduler.client.report [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [req-d54e38b4-31d3-4bb1-b1a1-325f89ae3c1d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c7fa38b2-245d-4337-a012-22c1a01c0a72. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d54e38b4-31d3-4bb1-b1a1-325f89ae3c1d"}]} [ 882.826078] env[62522]: DEBUG nova.scheduler.client.report [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Refreshing inventories for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 882.848186] env[62522]: DEBUG nova.scheduler.client.report [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Updating ProviderTree inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 882.848481] env[62522]: DEBUG nova.compute.provider_tree [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 882.862267] env[62522]: DEBUG nova.scheduler.client.report [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Refreshing aggregate associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, aggregates: None {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 882.885123] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415610, 'name': ReconfigVM_Task, 'duration_secs': 0.332894} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.885474] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Reconfigured VM instance instance-0000003a to attach disk [datastore1] 9141ffdd-cbfa-4efe-a01b-dc1326af474c/ephemeral_0.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 882.886952] env[62522]: DEBUG nova.scheduler.client.report [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Refreshing trait associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 882.889516] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0d24430-640e-4589-a034-c48a539581dc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.898918] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for the task: (returnval){ [ 882.898918] env[62522]: value = "task-2415612" [ 882.898918] env[62522]: _type = "Task" [ 882.898918] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.912218] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415612, 'name': Rename_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.138779] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415611, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490725} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.139052] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] fe1f5581-0dec-41e5-a450-c3de5a573602/fe1f5581-0dec-41e5-a450-c3de5a573602.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 883.139279] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 883.139529] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f6209e1d-34ce-477a-9fbc-d2fcf751410b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.150627] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 883.150627] env[62522]: value = "task-2415613" [ 883.150627] env[62522]: _type = "Task" [ 883.150627] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.165698] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415613, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.268448] env[62522]: DEBUG oslo_concurrency.lockutils [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Releasing lock "refresh_cache-c28d2907-5b59-4df8-91a8-4ba0f2047d89" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.268448] env[62522]: DEBUG nova.compute.manager [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Instance network_info: |[{"id": "931dfe44-9ac3-4df4-a4ea-6c8612389451", "address": "fa:16:3e:f5:6d:32", "network": {"id": "949f3536-8a7e-4edf-b6cc-6a264fe5fe83", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1891232839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f93394feaa4f4b61a5d3d670d32ec599", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap931dfe44-9a", "ovs_interfaceid": "931dfe44-9ac3-4df4-a4ea-6c8612389451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 883.274479] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Creating linked-clone VM from snapshot {{(pid=62522) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 883.275771] env[62522]: DEBUG oslo_concurrency.lockutils [req-e47ace33-b1a5-495a-819a-f21d718a38ae req-77acc990-271f-43c6-9736-fe36d192ac2b service nova] Acquired lock "refresh_cache-c28d2907-5b59-4df8-91a8-4ba0f2047d89" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.275771] env[62522]: DEBUG nova.network.neutron [req-e47ace33-b1a5-495a-819a-f21d718a38ae req-77acc990-271f-43c6-9736-fe36d192ac2b service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Refreshing network info cache for port 931dfe44-9ac3-4df4-a4ea-6c8612389451 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 883.276418] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:6d:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'983826cf-6390-4ec6-bf97-30a1060947fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '931dfe44-9ac3-4df4-a4ea-6c8612389451', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 883.284043] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Creating folder: Project (f93394feaa4f4b61a5d3d670d32ec599). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 883.289364] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a29bc9b2-e731-42a4-8b4b-b3de00719d08 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.292486] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3bf3726f-8381-4b05-8796-14a2a84c7f74 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.300166] env[62522]: DEBUG oslo_vmware.api [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 883.300166] env[62522]: value = "task-2415614" [ 883.300166] env[62522]: _type = "Task" [ 883.300166] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.304418] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Created folder: Project (f93394feaa4f4b61a5d3d670d32ec599) in parent group-v489562. [ 883.304610] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Creating folder: Instances. Parent ref: group-v489728. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 883.307262] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-609b8e44-5673-4b1a-a93d-e53595506891 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.314224] env[62522]: DEBUG oslo_vmware.api [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415614, 'name': CloneVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.322981] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Created folder: Instances in parent group-v489728. [ 883.323256] env[62522]: DEBUG oslo.service.loopingcall [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 883.323443] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 883.323644] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e91ff344-ca5a-4b55-ad5b-c69b7d7270e1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.348036] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 883.348036] env[62522]: value = "task-2415617" [ 883.348036] env[62522]: _type = "Task" [ 883.348036] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.361604] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415617, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.410102] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415612, 'name': Rename_Task, 'duration_secs': 0.325245} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.413720] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 883.414233] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cc09a594-d28d-426a-96b3-f227cbe7388c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.421352] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for the task: (returnval){ [ 883.421352] env[62522]: value = "task-2415618" [ 883.421352] env[62522]: _type = "Task" [ 883.421352] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.440965] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415618, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.503511] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f650ce79-1cbe-48ea-b16f-348a1fc69f6d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.512079] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09bc703d-c7cb-4047-8456-453ddbca7c57 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.545216] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b406fc4e-85ff-48d8-87dd-2fea6fcb95e3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.553922] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4746f909-1f2a-41e9-9b17-537b21e29b47 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.572054] env[62522]: DEBUG nova.compute.provider_tree [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 883.665131] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415613, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068911} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.665131] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 883.665131] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0803758e-5e1a-4d9b-8586-8de6917b5a22 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.698979] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] fe1f5581-0dec-41e5-a450-c3de5a573602/fe1f5581-0dec-41e5-a450-c3de5a573602.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 883.700435] env[62522]: DEBUG nova.network.neutron [req-e47ace33-b1a5-495a-819a-f21d718a38ae req-77acc990-271f-43c6-9736-fe36d192ac2b service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Updated VIF entry in instance network info cache for port 931dfe44-9ac3-4df4-a4ea-6c8612389451. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 883.700838] env[62522]: DEBUG nova.network.neutron [req-e47ace33-b1a5-495a-819a-f21d718a38ae req-77acc990-271f-43c6-9736-fe36d192ac2b service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Updating instance_info_cache with network_info: [{"id": "931dfe44-9ac3-4df4-a4ea-6c8612389451", "address": "fa:16:3e:f5:6d:32", "network": {"id": "949f3536-8a7e-4edf-b6cc-6a264fe5fe83", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1891232839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f93394feaa4f4b61a5d3d670d32ec599", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap931dfe44-9a", "ovs_interfaceid": "931dfe44-9ac3-4df4-a4ea-6c8612389451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.702499] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73c85b55-0f3e-409f-94ec-f3e04b806ae4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.726582] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a 
tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 883.726582] env[62522]: value = "task-2415619" [ 883.726582] env[62522]: _type = "Task" [ 883.726582] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.735299] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415619, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.810698] env[62522]: DEBUG oslo_vmware.api [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415614, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.857745] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415617, 'name': CreateVM_Task, 'duration_secs': 0.496162} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.858420] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 883.858974] env[62522]: DEBUG oslo_concurrency.lockutils [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.859169] env[62522]: DEBUG oslo_concurrency.lockutils [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.859488] env[62522]: DEBUG oslo_concurrency.lockutils [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 883.859739] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be9e9880-738c-4d61-8eec-a0d27458ff4d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.869675] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 883.869675] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52378443-1d72-7a3f-ef0f-108b91385331" [ 883.869675] env[62522]: _type = "Task" [ 883.869675] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.879244] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52378443-1d72-7a3f-ef0f-108b91385331, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.934497] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415618, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.110724] env[62522]: DEBUG nova.scheduler.client.report [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Updated inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with generation 89 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 884.111066] env[62522]: DEBUG nova.compute.provider_tree [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Updating resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 generation from 89 to 90 during operation: update_inventory {{(pid=62522) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 884.111420] env[62522]: DEBUG nova.compute.provider_tree [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 884.221706] env[62522]: DEBUG oslo_concurrency.lockutils [req-e47ace33-b1a5-495a-819a-f21d718a38ae req-77acc990-271f-43c6-9736-fe36d192ac2b service nova] Releasing lock "refresh_cache-c28d2907-5b59-4df8-91a8-4ba0f2047d89" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.237021] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415619, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.312197] env[62522]: DEBUG oslo_vmware.api [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415614, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.379878] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52378443-1d72-7a3f-ef0f-108b91385331, 'name': SearchDatastore_Task, 'duration_secs': 0.012232} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.380218] env[62522]: DEBUG oslo_concurrency.lockutils [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.380454] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 884.380723] env[62522]: DEBUG oslo_concurrency.lockutils [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.380935] env[62522]: DEBUG oslo_concurrency.lockutils [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.381097] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 884.381392] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b01b111c-ebe9-494a-9011-bc78bc35eea7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.390667] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Created directory with path [datastore1] devstack-image-cache_base 
{{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 884.390667] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 884.391413] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00cdafdc-427a-4226-bb38-cfea4f9d751c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.396304] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 884.396304] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5263604f-a137-3b2b-6d22-e87966e77c29" [ 884.396304] env[62522]: _type = "Task" [ 884.396304] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.404256] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5263604f-a137-3b2b-6d22-e87966e77c29, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.431285] env[62522]: DEBUG oslo_vmware.api [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2415618, 'name': PowerOnVM_Task, 'duration_secs': 0.651814} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.431595] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 884.431803] env[62522]: INFO nova.compute.manager [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Took 8.02 seconds to spawn the instance on the hypervisor. 
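
The CloneVM_Task, CreateVM_Task, SearchDatastore_Task and PowerOnVM_Task entries above all follow the same oslo.vmware pattern: a vSphere call returns a Task managed object, and the session polls it (the wait_for_task/_poll_task "progress is N%" lines) until it reports "completed successfully" with a duration. The sketch below is an illustrative reconstruction of that pattern, not Nova's own code; the vCenter host, credentials and the vm_ref placeholder are assumptions.

    # Illustrative sketch only -- not Nova's implementation. It shows the
    # submit-and-poll pattern that produces the wait_for_task/_poll_task
    # log entries above. Host, credentials and vm_ref are placeholders.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vcenter.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    vm_ref = ...  # a VirtualMachine managed-object reference obtained elsewhere

    # invoke_api() returns the Task reference; wait_for_task() polls it,
    # logging progress, and raises if the task ends in an error state.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)
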
[ 884.431982] env[62522]: DEBUG nova.compute.manager [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 884.432754] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5d6fb75-8743-48a6-aa10-1380ec68f486 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.616660] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.406s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.619155] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.021s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.619771] env[62522]: DEBUG nova.objects.instance [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lazy-loading 'resources' on Instance uuid 74e663b1-b552-4b71-aa74-308e908d79e7 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 884.642754] env[62522]: INFO nova.scheduler.client.report [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Deleted allocations for instance 76cb551e-e605-4c80-a6ef-e36681fc0bc2 [ 884.736904] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415619, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.812239] env[62522]: DEBUG oslo_vmware.api [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415614, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.907191] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5263604f-a137-3b2b-6d22-e87966e77c29, 'name': SearchDatastore_Task, 'duration_secs': 0.008911} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.908023] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e24177f9-38a2-4e8e-9294-02bde7d357b0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.913344] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 884.913344] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52757a68-8091-4100-227c-15fe9cf42718" [ 884.913344] env[62522]: _type = "Task" [ 884.913344] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.921309] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52757a68-8091-4100-227c-15fe9cf42718, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.949626] env[62522]: INFO nova.compute.manager [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Took 50.15 seconds to build instance. [ 885.150732] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0465c00b-6e47-4e83-8d65-04fbbca83ea3 tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Lock "76cb551e-e605-4c80-a6ef-e36681fc0bc2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.867s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.239126] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415619, 'name': ReconfigVM_Task, 'duration_secs': 1.089881} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.241870] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Reconfigured VM instance instance-0000003b to attach disk [datastore1] fe1f5581-0dec-41e5-a450-c3de5a573602/fe1f5581-0dec-41e5-a450-c3de5a573602.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 885.242815] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-30170579-9af3-407c-b69e-96b1e6d6a0c4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.249735] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 885.249735] env[62522]: value = "task-2415620" [ 885.249735] env[62522]: _type = "Task" [ 885.249735] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.260767] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415620, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.313553] env[62522]: DEBUG oslo_vmware.api [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415614, 'name': CloneVM_Task, 'duration_secs': 1.896981} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.316935] env[62522]: INFO nova.virt.vmwareapi.vmops [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Created linked-clone VM from snapshot [ 885.322133] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f5993f3-fdbe-47b2-a3c9-5cc785c9f8bd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.330933] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Uploading image b40dc547-7a0e-447f-9bd9-7ed384cf6f5d {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 885.359704] env[62522]: DEBUG oslo_vmware.rw_handles [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 885.359704] env[62522]: value = "vm-489730" [ 885.359704] env[62522]: _type = "VirtualMachine" [ 885.359704] env[62522]: }. 
{{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 885.360060] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-80c32a0e-d087-4e14-80cb-e40e07aa72cc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.370209] env[62522]: DEBUG oslo_vmware.rw_handles [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lease: (returnval){ [ 885.370209] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52433cc8-c2ec-77bd-bf0d-7dae84ea646d" [ 885.370209] env[62522]: _type = "HttpNfcLease" [ 885.370209] env[62522]: } obtained for exporting VM: (result){ [ 885.370209] env[62522]: value = "vm-489730" [ 885.370209] env[62522]: _type = "VirtualMachine" [ 885.370209] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 885.370547] env[62522]: DEBUG oslo_vmware.api [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the lease: (returnval){ [ 885.370547] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52433cc8-c2ec-77bd-bf0d-7dae84ea646d" [ 885.370547] env[62522]: _type = "HttpNfcLease" [ 885.370547] env[62522]: } to be ready. {{(pid=62522) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 885.380341] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 885.380341] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52433cc8-c2ec-77bd-bf0d-7dae84ea646d" [ 885.380341] env[62522]: _type = "HttpNfcLease" [ 885.380341] env[62522]: } is initializing. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 885.426801] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52757a68-8091-4100-227c-15fe9cf42718, 'name': SearchDatastore_Task, 'duration_secs': 0.010173} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.427734] env[62522]: DEBUG oslo_concurrency.lockutils [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.428071] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] c28d2907-5b59-4df8-91a8-4ba0f2047d89/c28d2907-5b59-4df8-91a8-4ba0f2047d89.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 885.428387] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-81ad663b-210b-4b10-9a02-121e0d2cc020 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.443351] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 885.443351] env[62522]: value = "task-2415622" [ 885.443351] env[62522]: _type = "Task" [ 885.443351] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.451188] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e98553a-0c69-4f35-9ab8-6211dbccce74 tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Lock "9141ffdd-cbfa-4efe-a01b-dc1326af474c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.897s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.457413] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2415622, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.700610] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68f4774a-4141-493a-9279-a984a3972a47 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.708937] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-828e7348-7cd4-46a6-959f-bf600b237746 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.757512] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b45dd6da-f506-40be-a028-74461404092c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.766364] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415620, 'name': Rename_Task, 'duration_secs': 0.210599} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.769337] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 885.769695] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27207b05-5bdb-4a49-8c7b-9789a37f16d0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.772639] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0883a5ef-9753-4bb7-871e-d6f934c78495 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.789722] env[62522]: DEBUG nova.compute.provider_tree [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 885.793161] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 885.793161] env[62522]: value = "task-2415623" [ 885.793161] env[62522]: _type = "Task" [ 885.793161] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.809961] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415623, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.881729] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 885.881729] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52433cc8-c2ec-77bd-bf0d-7dae84ea646d" [ 885.881729] env[62522]: _type = "HttpNfcLease" [ 885.881729] env[62522]: } is ready. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 885.882029] env[62522]: DEBUG oslo_vmware.rw_handles [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 885.882029] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52433cc8-c2ec-77bd-bf0d-7dae84ea646d" [ 885.882029] env[62522]: _type = "HttpNfcLease" [ 885.882029] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 885.883261] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0bd07e-757f-41bf-b895-49b6fc009074 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.894385] env[62522]: DEBUG oslo_vmware.rw_handles [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c4510a-fb31-99f0-d354-74828b7104bf/disk-0.vmdk from lease info. {{(pid=62522) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 885.894597] env[62522]: DEBUG oslo_vmware.rw_handles [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c4510a-fb31-99f0-d354-74828b7104bf/disk-0.vmdk for reading. {{(pid=62522) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 885.962671] env[62522]: DEBUG nova.compute.manager [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 885.980200] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2415622, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.015981] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e95cc799-a230-42b1-82b1-ef0c1daf941d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.202024] env[62522]: DEBUG nova.compute.manager [req-390692c6-f230-40b1-b5b6-d528c2e64694 req-a5b6bed8-4562-461f-ade8-db24aa4ff521 service nova] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Received event network-changed-40bd7b1c-a8fa-4e59-802e-a8392e0d30eb {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 886.202236] env[62522]: DEBUG nova.compute.manager [req-390692c6-f230-40b1-b5b6-d528c2e64694 req-a5b6bed8-4562-461f-ade8-db24aa4ff521 service nova] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Refreshing instance network info cache due to event network-changed-40bd7b1c-a8fa-4e59-802e-a8392e0d30eb. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 886.202450] env[62522]: DEBUG oslo_concurrency.lockutils [req-390692c6-f230-40b1-b5b6-d528c2e64694 req-a5b6bed8-4562-461f-ade8-db24aa4ff521 service nova] Acquiring lock "refresh_cache-9141ffdd-cbfa-4efe-a01b-dc1326af474c" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.202593] env[62522]: DEBUG oslo_concurrency.lockutils [req-390692c6-f230-40b1-b5b6-d528c2e64694 req-a5b6bed8-4562-461f-ade8-db24aa4ff521 service nova] Acquired lock "refresh_cache-9141ffdd-cbfa-4efe-a01b-dc1326af474c" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.202753] env[62522]: DEBUG nova.network.neutron [req-390692c6-f230-40b1-b5b6-d528c2e64694 req-a5b6bed8-4562-461f-ade8-db24aa4ff521 service nova] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Refreshing network info cache for port 40bd7b1c-a8fa-4e59-802e-a8392e0d30eb {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 886.299144] env[62522]: DEBUG nova.scheduler.client.report [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 886.313190] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415623, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.480804] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2415622, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.70635} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.482397] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] c28d2907-5b59-4df8-91a8-4ba0f2047d89/c28d2907-5b59-4df8-91a8-4ba0f2047d89.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 886.482774] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 886.485355] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-12c8366e-6841-464d-a6b4-fb7c7b56a06a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.496265] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 886.496265] env[62522]: value = "task-2415624" [ 886.496265] env[62522]: _type = "Task" [ 886.496265] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.505949] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2415624, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.508164] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.809144] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.190s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.812825] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.209s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.813192] env[62522]: DEBUG nova.objects.instance [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lazy-loading 'resources' on Instance uuid 5ed51dce-2a56-4389-acf8-280bd93ff5f0 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 886.820205] env[62522]: DEBUG oslo_vmware.api [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415623, 'name': PowerOnVM_Task, 'duration_secs': 0.683321} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.823324] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 886.823679] env[62522]: INFO nova.compute.manager [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Took 8.36 seconds to spawn the instance on the hypervisor. 
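
The "compute_resources" lock entries in this stretch ("acquired ... waited 44.209s", '"released" ... held 2.159s') come from oslo.concurrency serializing the resource tracker's claims and usage updates on a single named lock. A minimal sketch of that locking pattern follows, assuming the decorator-based usage Nova typically employs; it is not the resource tracker's actual code.

    # Illustrative sketch only (not Nova's code): the oslo.concurrency
    # pattern behind the "compute_resources" lock entries, where each caller
    # logs how long it waited for the named lock and how long it held it.
    from oslo_concurrency import lockutils

    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('compute_resources')
    def update_usage():
        # Runs with the lock held; long "waited" values in the log mean other
        # claims/updates were queued behind whichever caller held it.
        pass

    update_usage()
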
[ 886.824150] env[62522]: DEBUG nova.compute.manager [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 886.826306] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d120c7-00eb-4418-b10a-289837f1b697 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.841029] env[62522]: INFO nova.scheduler.client.report [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Deleted allocations for instance 74e663b1-b552-4b71-aa74-308e908d79e7 [ 887.010633] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2415624, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.148063} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.015120] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 887.016464] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7b9d0af-b847-465c-8414-f17e486f8cab {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.047932] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] c28d2907-5b59-4df8-91a8-4ba0f2047d89/c28d2907-5b59-4df8-91a8-4ba0f2047d89.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 887.048453] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91ec3c40-bdd0-4f2f-a9d0-94c1a0479cb9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.074034] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 887.074034] env[62522]: value = "task-2415625" [ 887.074034] env[62522]: _type = "Task" [ 887.074034] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.086480] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2415625, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.126675] env[62522]: DEBUG nova.network.neutron [req-390692c6-f230-40b1-b5b6-d528c2e64694 req-a5b6bed8-4562-461f-ade8-db24aa4ff521 service nova] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Updated VIF entry in instance network info cache for port 40bd7b1c-a8fa-4e59-802e-a8392e0d30eb. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 887.127121] env[62522]: DEBUG nova.network.neutron [req-390692c6-f230-40b1-b5b6-d528c2e64694 req-a5b6bed8-4562-461f-ade8-db24aa4ff521 service nova] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Updating instance_info_cache with network_info: [{"id": "40bd7b1c-a8fa-4e59-802e-a8392e0d30eb", "address": "fa:16:3e:13:c0:8d", "network": {"id": "2d13676e-cb66-4166-b85f-b3c122b27f67", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1154652116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dae444f2b5845aa9264fea1f237f0e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40bd7b1c-a8", "ovs_interfaceid": "40bd7b1c-a8fa-4e59-802e-a8392e0d30eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.348586] env[62522]: INFO nova.compute.manager [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Took 50.25 seconds to build instance. [ 887.355693] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fbb28797-81c9-4802-9d56-91903557bb5e tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "74e663b1-b552-4b71-aa74-308e908d79e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.284s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.585660] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2415625, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.630025] env[62522]: DEBUG oslo_concurrency.lockutils [req-390692c6-f230-40b1-b5b6-d528c2e64694 req-a5b6bed8-4562-461f-ade8-db24aa4ff521 service nova] Releasing lock "refresh_cache-9141ffdd-cbfa-4efe-a01b-dc1326af474c" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 887.856223] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e62c8ae4-e7ed-4691-85d2-8f29d24a3f3a tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "fe1f5581-0dec-41e5-a450-c3de5a573602" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.164s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.892403] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db49793e-7feb-4212-8848-8e6d9d5ba3ab {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.902721] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66c03e30-8ee9-4f47-bc60-4992c498617b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.936200] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb6db0b-c5e5-4dc0-a2fb-31921bce1b71 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.944295] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6303809f-b69c-41d5-b947-bf67c43782c1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.958508] env[62522]: DEBUG nova.compute.provider_tree [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 888.086655] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2415625, 'name': ReconfigVM_Task, 'duration_secs': 0.532888} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.086972] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Reconfigured VM instance instance-0000003c to attach disk [datastore1] c28d2907-5b59-4df8-91a8-4ba0f2047d89/c28d2907-5b59-4df8-91a8-4ba0f2047d89.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 888.087781] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-630b1450-093b-4d8b-8fe4-84414efdcaf9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.095932] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 888.095932] env[62522]: value = "task-2415626" [ 888.095932] env[62522]: _type = "Task" [ 888.095932] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.105780] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2415626, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.359954] env[62522]: DEBUG nova.compute.manager [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 888.464495] env[62522]: DEBUG nova.scheduler.client.report [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 888.606929] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2415626, 'name': Rename_Task, 'duration_secs': 0.31773} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.607326] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 888.607590] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d6a94817-075e-4dc7-b3cc-4edde7cb068e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.616331] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 888.616331] env[62522]: value = "task-2415627" [ 888.616331] env[62522]: _type = "Task" [ 888.616331] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.626149] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2415627, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.888573] env[62522]: DEBUG oslo_concurrency.lockutils [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.970468] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.159s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.972910] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.371s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.974576] env[62522]: INFO nova.compute.claims [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 888.993467] env[62522]: INFO nova.scheduler.client.report [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Deleted allocations for instance 5ed51dce-2a56-4389-acf8-280bd93ff5f0 [ 889.129161] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a 
tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2415627, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.502613] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3c05c7ec-80b6-48be-bf3b-b611e95a8752 tempest-MultipleCreateTestJSON-866058691 tempest-MultipleCreateTestJSON-866058691-project-member] Lock "5ed51dce-2a56-4389-acf8-280bd93ff5f0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.958s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.572239] env[62522]: INFO nova.compute.manager [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Rebuilding instance [ 889.616809] env[62522]: DEBUG oslo_concurrency.lockutils [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquiring lock "cce5f0d4-364d-4295-a27d-44ca8585f803" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.617092] env[62522]: DEBUG oslo_concurrency.lockutils [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Lock "cce5f0d4-364d-4295-a27d-44ca8585f803" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.617312] env[62522]: DEBUG oslo_concurrency.lockutils [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquiring lock "cce5f0d4-364d-4295-a27d-44ca8585f803-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.617493] env[62522]: DEBUG oslo_concurrency.lockutils [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Lock "cce5f0d4-364d-4295-a27d-44ca8585f803-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.617660] env[62522]: DEBUG oslo_concurrency.lockutils [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Lock "cce5f0d4-364d-4295-a27d-44ca8585f803-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.623144] env[62522]: INFO nova.compute.manager [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 
cce5f0d4-364d-4295-a27d-44ca8585f803] Terminating instance [ 889.631426] env[62522]: DEBUG nova.compute.manager [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 889.632177] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f426eba6-9b38-4052-b4ad-094ecd58202a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.639246] env[62522]: DEBUG oslo_vmware.api [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2415627, 'name': PowerOnVM_Task, 'duration_secs': 0.648926} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.640437] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 889.640660] env[62522]: INFO nova.compute.manager [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Took 8.54 seconds to spawn the instance on the hypervisor. [ 889.640840] env[62522]: DEBUG nova.compute.manager [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 889.642426] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e16b4e7-fcc4-4f90-a68e-6b86a54e2f2a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.735170] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Acquiring lock "8539afc0-1753-4c37-9fc9-25ec97b97243" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.735624] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Lock "8539afc0-1753-4c37-9fc9-25ec97b97243" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.135966] env[62522]: DEBUG nova.compute.manager [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: 
cce5f0d4-364d-4295-a27d-44ca8585f803] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 890.136208] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 890.137094] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da0d88de-fc7b-4cdd-b99b-eecb5e89b6f7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.147259] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 890.147513] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-60ed79e4-8799-42fe-be03-da3327c1d76d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.154824] env[62522]: DEBUG oslo_vmware.api [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Waiting for the task: (returnval){ [ 890.154824] env[62522]: value = "task-2415628" [ 890.154824] env[62522]: _type = "Task" [ 890.154824] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.170776] env[62522]: INFO nova.compute.manager [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Took 49.37 seconds to build instance. [ 890.177742] env[62522]: DEBUG oslo_vmware.api [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415628, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.478201] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a30a9c-89dd-48e4-b511-ac0696716b15 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.486397] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f73d3c-73ce-4900-9634-87dcace7903f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.522404] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59946ca8-e823-4b6c-8432-559b10ed406e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.530608] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89008069-204b-48c9-867d-1922adb8d22d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.544168] env[62522]: DEBUG nova.compute.provider_tree [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 890.664495] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 890.664777] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c286194-dacc-4c5c-b885-ea4866f1f1c5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.672053] env[62522]: DEBUG oslo_vmware.api [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415628, 'name': PowerOffVM_Task, 'duration_secs': 0.378814} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.674453] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 890.674453] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 890.674453] env[62522]: DEBUG oslo_concurrency.lockutils [None req-41885c48-88f8-4eda-a0ab-dff98c21f64a tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.895s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.674453] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 890.674453] env[62522]: value = "task-2415629" [ 890.674453] env[62522]: _type = "Task" [ 890.674453] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.674686] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f8231bad-715e-4035-9c0d-f700154f262b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.688579] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415629, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.752052] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 890.752317] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 890.753136] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Deleting the datastore file [datastore1] cce5f0d4-364d-4295-a27d-44ca8585f803 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 890.753136] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef5da349-da5c-4795-b3ca-496cc26c9eb7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.760124] env[62522]: DEBUG oslo_vmware.api [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Waiting for the task: (returnval){ [ 890.760124] env[62522]: value = "task-2415631" [ 890.760124] env[62522]: _type = "Task" [ 890.760124] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.770726] env[62522]: DEBUG oslo_vmware.api [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415631, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.047220] env[62522]: DEBUG nova.scheduler.client.report [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 891.182890] env[62522]: DEBUG nova.compute.manager [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 891.192210] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415629, 'name': PowerOffVM_Task, 'duration_secs': 0.247266} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.193246] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 891.193246] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 891.193572] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cd27977-0bb2-46bf-b45f-fc859c5229f3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.201910] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 891.201910] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-835dec7f-9042-4af2-ab47-e6fcded7d2ce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.273135] env[62522]: DEBUG oslo_vmware.api [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Task: {'id': task-2415631, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.224291} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.274499] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 891.274728] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 891.274966] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 891.275176] env[62522]: INFO nova.compute.manager [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Took 1.14 seconds to destroy the instance on the hypervisor. [ 891.275452] env[62522]: DEBUG oslo.service.loopingcall [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 891.275686] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 891.275865] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 891.276044] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Deleting the datastore file [datastore1] fe1f5581-0dec-41e5-a450-c3de5a573602 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 891.276290] env[62522]: DEBUG nova.compute.manager [-] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 891.276383] env[62522]: DEBUG nova.network.neutron [-] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 891.278173] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a2a344d2-0df7-4f6f-8092-98474f7b75b8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.285071] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 891.285071] env[62522]: value = "task-2415633" [ 891.285071] env[62522]: _type = "Task" [ 891.285071] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.295448] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415633, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.560082] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.587s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.560598] env[62522]: DEBUG nova.compute.manager [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 891.566344] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 46.537s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.570132] env[62522]: INFO nova.compute.claims [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 891.667111] env[62522]: DEBUG nova.compute.manager [req-266edc56-8a2d-4104-b3f6-6ef7093a9cbd req-997bd5a4-0196-493e-9e17-6e9e28e32e22 service nova] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Received event network-vif-deleted-ca4e7776-76bf-40fc-ac2a-ac8917ca2978 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 891.667111] env[62522]: INFO nova.compute.manager [req-266edc56-8a2d-4104-b3f6-6ef7093a9cbd req-997bd5a4-0196-493e-9e17-6e9e28e32e22 service nova] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Neutron deleted interface ca4e7776-76bf-40fc-ac2a-ac8917ca2978; detaching it from the instance and deleting it from the info cache [ 891.667111] env[62522]: DEBUG nova.network.neutron [req-266edc56-8a2d-4104-b3f6-6ef7093a9cbd req-997bd5a4-0196-493e-9e17-6e9e28e32e22 service nova] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.713855] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.795018] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415633, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.268939} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.795425] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 891.795515] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 891.795695] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 891.836112] env[62522]: DEBUG nova.compute.manager [req-1dab6f52-7510-423e-bae2-f5b2cd8e35d8 req-2934bc32-ebfb-4d3b-8122-5863f8901680 service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Received event network-changed-931dfe44-9ac3-4df4-a4ea-6c8612389451 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 891.836364] env[62522]: DEBUG nova.compute.manager [req-1dab6f52-7510-423e-bae2-f5b2cd8e35d8 req-2934bc32-ebfb-4d3b-8122-5863f8901680 service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Refreshing instance network info cache due to event network-changed-931dfe44-9ac3-4df4-a4ea-6c8612389451. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 891.836690] env[62522]: DEBUG oslo_concurrency.lockutils [req-1dab6f52-7510-423e-bae2-f5b2cd8e35d8 req-2934bc32-ebfb-4d3b-8122-5863f8901680 service nova] Acquiring lock "refresh_cache-c28d2907-5b59-4df8-91a8-4ba0f2047d89" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.836870] env[62522]: DEBUG oslo_concurrency.lockutils [req-1dab6f52-7510-423e-bae2-f5b2cd8e35d8 req-2934bc32-ebfb-4d3b-8122-5863f8901680 service nova] Acquired lock "refresh_cache-c28d2907-5b59-4df8-91a8-4ba0f2047d89" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.837586] env[62522]: DEBUG nova.network.neutron [req-1dab6f52-7510-423e-bae2-f5b2cd8e35d8 req-2934bc32-ebfb-4d3b-8122-5863f8901680 service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Refreshing network info cache for port 931dfe44-9ac3-4df4-a4ea-6c8612389451 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 892.075314] env[62522]: DEBUG nova.compute.utils [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 892.079231] env[62522]: DEBUG nova.compute.manager [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 892.080065] env[62522]: DEBUG nova.network.neutron [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 892.143646] env[62522]: DEBUG nova.network.neutron [-] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.171505] env[62522]: DEBUG nova.policy [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f1f4b0fffebf4a31bf6c37227c3f56c1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5c08a602a8fe4b4396543ac75ac40e7b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 892.174097] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b7f6a9fb-2996-4197-951e-39ce2f7b1474 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.183636] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73fda97f-1c0d-43ba-a487-6121335c8b9d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.219619] env[62522]: DEBUG nova.compute.manager [req-266edc56-8a2d-4104-b3f6-6ef7093a9cbd req-997bd5a4-0196-493e-9e17-6e9e28e32e22 service nova] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Detach interface failed, port_id=ca4e7776-76bf-40fc-ac2a-ac8917ca2978, reason: Instance cce5f0d4-364d-4295-a27d-44ca8585f803 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 892.580665] env[62522]: DEBUG nova.compute.manager [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 892.622044] env[62522]: DEBUG nova.network.neutron [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Successfully created port: 59c9ae48-dc88-4de9-ba91-f62a004a177c {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 892.646326] env[62522]: INFO nova.compute.manager [-] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Took 1.37 seconds to deallocate network for instance. 
[ 892.781141] env[62522]: DEBUG nova.network.neutron [req-1dab6f52-7510-423e-bae2-f5b2cd8e35d8 req-2934bc32-ebfb-4d3b-8122-5863f8901680 service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Updated VIF entry in instance network info cache for port 931dfe44-9ac3-4df4-a4ea-6c8612389451. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 892.781141] env[62522]: DEBUG nova.network.neutron [req-1dab6f52-7510-423e-bae2-f5b2cd8e35d8 req-2934bc32-ebfb-4d3b-8122-5863f8901680 service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Updating instance_info_cache with network_info: [{"id": "931dfe44-9ac3-4df4-a4ea-6c8612389451", "address": "fa:16:3e:f5:6d:32", "network": {"id": "949f3536-8a7e-4edf-b6cc-6a264fe5fe83", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1891232839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f93394feaa4f4b61a5d3d670d32ec599", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap931dfe44-9a", "ovs_interfaceid": "931dfe44-9ac3-4df4-a4ea-6c8612389451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.835753] env[62522]: DEBUG nova.virt.hardware [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 892.835753] env[62522]: DEBUG nova.virt.hardware [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 892.835753] env[62522]: DEBUG nova.virt.hardware [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 
tempest-ServerDiskConfigTestJSON-536235198-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 892.836823] env[62522]: DEBUG nova.virt.hardware [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 892.837165] env[62522]: DEBUG nova.virt.hardware [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 892.837435] env[62522]: DEBUG nova.virt.hardware [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 892.837784] env[62522]: DEBUG nova.virt.hardware [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 892.838097] env[62522]: DEBUG nova.virt.hardware [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 892.838392] env[62522]: DEBUG nova.virt.hardware [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 892.838695] env[62522]: DEBUG nova.virt.hardware [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 892.839042] env[62522]: DEBUG nova.virt.hardware [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 892.841163] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-542e3cd2-44a9-41f9-b7bf-6be34507dd17 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.856580] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e45bd07c-c62f-4340-a5f1-55c475e8823e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.874192] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None 
req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:f1:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7654928b-7afe-42e3-a18d-68ecc775cefe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '15fdf0ad-85f9-4e6d-ace5-5a462f025e45', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 892.882850] env[62522]: DEBUG oslo.service.loopingcall [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 892.886015] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 892.886462] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ecf67c28-8c0d-4629-89e1-25b25ad1fcb9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.912860] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 892.912860] env[62522]: value = "task-2415634" [ 892.912860] env[62522]: _type = "Task" [ 892.912860] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.920835] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415634, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.089851] env[62522]: INFO nova.virt.block_device [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Booting with volume 5a72a163-f70c-478d-aff3-2a748c2d25d5 at /dev/sda [ 893.154159] env[62522]: DEBUG oslo_concurrency.lockutils [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.170804] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d2d82e07-1986-4a10-8eee-6d079fe0e809 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.184110] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-773dc76f-8bf6-4cf6-b188-35dd0c12999d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.196675] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e3d2e87-684c-420e-83af-679ec8e47eaf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.205518] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ca76ac-0c8c-4c73-993c-98705bec4532 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.253984] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5b609e-eae4-4540-8f64-912fb2aa1671 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.256741] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0c93c768-ddb7-4c37-bbde-1eb076d67f46 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.267815] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-226e5d62-f75c-4797-9872-85b7760d18b3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.271697] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dafcd52-a8bd-4778-9f03-e87f392acdce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.284890] env[62522]: DEBUG oslo_concurrency.lockutils [req-1dab6f52-7510-423e-bae2-f5b2cd8e35d8 req-2934bc32-ebfb-4d3b-8122-5863f8901680 service nova] Releasing lock "refresh_cache-c28d2907-5b59-4df8-91a8-4ba0f2047d89" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.294928] env[62522]: DEBUG nova.compute.provider_tree [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Inventory has not changed in ProviderTree for 
provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 893.310475] env[62522]: DEBUG nova.scheduler.client.report [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 893.317014] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b453c68a-f377-43a9-bd15-059e78af688f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.322661] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db95810-590c-42ea-ac31-26cb5f59cb0c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.337850] env[62522]: DEBUG nova.virt.block_device [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Updating existing volume attachment record: 644ee767-6eaf-484a-bb19-b359b10aee48 {{(pid=62522) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 893.423186] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415634, 'name': CreateVM_Task, 'duration_secs': 0.463407} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.423359] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 893.424058] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.424223] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.424540] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 893.424796] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f5c3615-3345-456e-a985-f89367308e35 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.429533] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 893.429533] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c49689-c579-19d6-9247-360c7b54c472" [ 893.429533] env[62522]: _type = "Task" [ 893.429533] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.437184] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c49689-c579-19d6-9247-360c7b54c472, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.810779] env[62522]: DEBUG oslo_vmware.rw_handles [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c4510a-fb31-99f0-d354-74828b7104bf/disk-0.vmdk. 
{{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 893.811888] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54158900-9bb3-4637-8b85-6cce362ad736 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.818641] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.252s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.819133] env[62522]: DEBUG nova.compute.manager [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 893.821813] env[62522]: DEBUG oslo_vmware.rw_handles [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c4510a-fb31-99f0-d354-74828b7104bf/disk-0.vmdk is in state: ready. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 893.821894] env[62522]: ERROR oslo_vmware.rw_handles [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c4510a-fb31-99f0-d354-74828b7104bf/disk-0.vmdk due to incomplete transfer. [ 893.822373] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 48.519s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.822601] env[62522]: DEBUG nova.objects.instance [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Lazy-loading 'resources' on Instance uuid 7a086314-3e49-48e9-82c9-cead8ecb19d1 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 893.825528] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-80478bf2-6d9e-4a3b-bb97-51bc1f27d5db {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.832513] env[62522]: DEBUG oslo_vmware.rw_handles [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c4510a-fb31-99f0-d354-74828b7104bf/disk-0.vmdk. 
{{(pid=62522) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 893.832593] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Uploaded image b40dc547-7a0e-447f-9bd9-7ed384cf6f5d to the Glance image server {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 893.834915] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Destroying the VM {{(pid=62522) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 893.835406] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5d090f01-2e0b-4bda-8462-468a368e2c9f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.842736] env[62522]: DEBUG oslo_vmware.api [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 893.842736] env[62522]: value = "task-2415635" [ 893.842736] env[62522]: _type = "Task" [ 893.842736] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.851201] env[62522]: DEBUG oslo_vmware.api [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415635, 'name': Destroy_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.944282] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c49689-c579-19d6-9247-360c7b54c472, 'name': SearchDatastore_Task, 'duration_secs': 0.029061} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.944741] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.945297] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 893.945628] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.946146] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.946541] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 893.946915] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6131d1ef-75e9-41ca-a2a4-2b0d1b4455e1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.961984] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 893.962273] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 893.963021] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed64634c-17c6-4855-aa8b-393cd6bd96f9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.968594] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 893.968594] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bb8963-cfdb-5694-4184-e778b2b106e9" [ 893.968594] env[62522]: _type = "Task" [ 893.968594] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.978590] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bb8963-cfdb-5694-4184-e778b2b106e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.324497] env[62522]: DEBUG nova.compute.utils [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 894.325901] env[62522]: DEBUG nova.compute.manager [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 894.326078] env[62522]: DEBUG nova.network.neutron [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 894.357699] env[62522]: DEBUG oslo_vmware.api [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415635, 'name': Destroy_Task, 'duration_secs': 0.334682} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.358346] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Destroyed the VM [ 894.358346] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Deleting Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 894.358588] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-28d19a27-ea6a-4889-9b02-b488ab146fa1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.368225] env[62522]: DEBUG oslo_vmware.api [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 894.368225] env[62522]: value = "task-2415636" [ 894.368225] env[62522]: _type = "Task" [ 894.368225] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.378349] env[62522]: DEBUG oslo_vmware.api [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415636, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.389330] env[62522]: DEBUG nova.compute.manager [req-fd7a92ab-54ed-46a8-a34e-a213850b6677 req-7f776d2f-d163-4965-a49b-3bbed0ca6b73 service nova] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Received event network-vif-plugged-59c9ae48-dc88-4de9-ba91-f62a004a177c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 894.389568] env[62522]: DEBUG oslo_concurrency.lockutils [req-fd7a92ab-54ed-46a8-a34e-a213850b6677 req-7f776d2f-d163-4965-a49b-3bbed0ca6b73 service nova] Acquiring lock "35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.389839] env[62522]: DEBUG oslo_concurrency.lockutils [req-fd7a92ab-54ed-46a8-a34e-a213850b6677 req-7f776d2f-d163-4965-a49b-3bbed0ca6b73 service nova] Lock "35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.390086] env[62522]: DEBUG oslo_concurrency.lockutils [req-fd7a92ab-54ed-46a8-a34e-a213850b6677 req-7f776d2f-d163-4965-a49b-3bbed0ca6b73 service nova] Lock "35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.390280] env[62522]: DEBUG nova.compute.manager [req-fd7a92ab-54ed-46a8-a34e-a213850b6677 
req-7f776d2f-d163-4965-a49b-3bbed0ca6b73 service nova] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] No waiting events found dispatching network-vif-plugged-59c9ae48-dc88-4de9-ba91-f62a004a177c {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 894.390858] env[62522]: WARNING nova.compute.manager [req-fd7a92ab-54ed-46a8-a34e-a213850b6677 req-7f776d2f-d163-4965-a49b-3bbed0ca6b73 service nova] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Received unexpected event network-vif-plugged-59c9ae48-dc88-4de9-ba91-f62a004a177c for instance with vm_state building and task_state block_device_mapping. [ 894.417109] env[62522]: DEBUG nova.policy [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3549d85b612044969af8fda179d169ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61314d3f0b9e4c368312e714a953e549', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 894.486229] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bb8963-cfdb-5694-4184-e778b2b106e9, 'name': SearchDatastore_Task, 'duration_secs': 0.036936} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.488328] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54efb41c-b27b-41ac-b44c-b1dabe6db723 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.499617] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 894.499617] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ee8752-c316-46e0-0320-f983fc006dd8" [ 894.499617] env[62522]: _type = "Task" [ 894.499617] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.507878] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ee8752-c316-46e0-0320-f983fc006dd8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.594070] env[62522]: DEBUG nova.network.neutron [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Successfully updated port: 59c9ae48-dc88-4de9-ba91-f62a004a177c {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 894.835403] env[62522]: DEBUG nova.compute.manager [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 894.862324] env[62522]: DEBUG nova.network.neutron [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Successfully created port: cecb41e7-0c40-40fd-b130-fc0afe3fba0d {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 894.886322] env[62522]: DEBUG oslo_vmware.api [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415636, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.975288] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a43a10fb-96d1-40d0-b9ae-b5843d1f2511 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.984231] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef38ed38-b87b-403a-adb3-f03b90df664f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.023274] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee0d7647-4504-4efb-bb3b-83aea698d5ed {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.033973] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ee8752-c316-46e0-0320-f983fc006dd8, 'name': SearchDatastore_Task, 'duration_secs': 0.013326} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.036303] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.036611] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] fe1f5581-0dec-41e5-a450-c3de5a573602/fe1f5581-0dec-41e5-a450-c3de5a573602.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 895.037367] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d4e37866-c160-4e04-bf21-3759e25d444a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.041046] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba2ad9d-4a2e-4f89-801d-6a6a3215624b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.058828] env[62522]: DEBUG nova.compute.provider_tree [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 895.061009] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 895.061009] env[62522]: value = "task-2415637" [ 895.061009] env[62522]: _type = "Task" [ 895.061009] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.068803] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415637, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.096376] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Acquiring lock "refresh_cache-35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.096534] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Acquired lock "refresh_cache-35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.096691] env[62522]: DEBUG nova.network.neutron [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 895.381466] env[62522]: DEBUG oslo_vmware.api [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415636, 'name': RemoveSnapshot_Task, 'duration_secs': 0.651182} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.381769] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Deleted Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 895.382016] env[62522]: INFO nova.compute.manager [None req-b3be9394-527e-48cc-9fcc-e3e296a717fd tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Took 14.71 seconds to snapshot the instance on the hypervisor. [ 895.443571] env[62522]: DEBUG nova.compute.manager [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 895.444118] env[62522]: DEBUG nova.virt.hardware [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 895.444340] env[62522]: DEBUG nova.virt.hardware [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 895.444496] env[62522]: DEBUG nova.virt.hardware [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 895.444674] env[62522]: DEBUG nova.virt.hardware [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 895.444816] env[62522]: DEBUG nova.virt.hardware [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 895.444957] env[62522]: DEBUG nova.virt.hardware [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 895.445183] env[62522]: DEBUG nova.virt.hardware [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 895.445331] env[62522]: DEBUG nova.virt.hardware [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 895.447107] env[62522]: DEBUG nova.virt.hardware [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] 
Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 895.447149] env[62522]: DEBUG nova.virt.hardware [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 895.447332] env[62522]: DEBUG nova.virt.hardware [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 895.448551] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1670a575-6cf1-4d12-bc22-8cd31aa3cfd4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.458389] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b8a85d0-581d-4ed3-9996-b5b83fbce61c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.565190] env[62522]: DEBUG nova.scheduler.client.report [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 895.577970] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415637, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.633845] env[62522]: DEBUG nova.network.neutron [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 895.819493] env[62522]: DEBUG nova.network.neutron [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Updating instance_info_cache with network_info: [{"id": "59c9ae48-dc88-4de9-ba91-f62a004a177c", "address": "fa:16:3e:49:66:a2", "network": {"id": "fe5f9959-d5b5-47d4-bfa0-689a4f70bf12", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-291790058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c08a602a8fe4b4396543ac75ac40e7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59c9ae48-dc", "ovs_interfaceid": "59c9ae48-dc88-4de9-ba91-f62a004a177c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.849122] env[62522]: DEBUG nova.compute.manager [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 895.881979] env[62522]: DEBUG nova.virt.hardware [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 895.881979] env[62522]: DEBUG nova.virt.hardware [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 895.881979] env[62522]: DEBUG nova.virt.hardware [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 895.881979] env[62522]: DEBUG nova.virt.hardware [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 895.882325] env[62522]: DEBUG nova.virt.hardware [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 895.882612] env[62522]: DEBUG nova.virt.hardware [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 895.883058] env[62522]: DEBUG nova.virt.hardware [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 895.883364] env[62522]: DEBUG nova.virt.hardware [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 895.883741] env[62522]: DEBUG nova.virt.hardware [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Got 1 possible 
topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 895.884162] env[62522]: DEBUG nova.virt.hardware [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 895.886552] env[62522]: DEBUG nova.virt.hardware [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 895.886552] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a211b3d-d08b-4203-8518-a05a14c07ff7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.901201] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0303577-b900-470f-b82d-3e2f9ecec226 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.075026] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.252s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.077038] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 46.688s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.081585] env[62522]: INFO nova.compute.claims [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 896.089579] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415637, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.02188} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.089579] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] fe1f5581-0dec-41e5-a450-c3de5a573602/fe1f5581-0dec-41e5-a450-c3de5a573602.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 896.089579] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 896.089579] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-58dbf19f-6069-41b0-b4e5-40191715bbb2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.096019] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 896.096019] env[62522]: value = "task-2415638" [ 896.096019] env[62522]: _type = "Task" [ 896.096019] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.101707] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415638, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.115204] env[62522]: INFO nova.scheduler.client.report [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Deleted allocations for instance 7a086314-3e49-48e9-82c9-cead8ecb19d1 [ 896.326261] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Releasing lock "refresh_cache-35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.326261] env[62522]: DEBUG nova.compute.manager [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Instance network_info: |[{"id": "59c9ae48-dc88-4de9-ba91-f62a004a177c", "address": "fa:16:3e:49:66:a2", "network": {"id": "fe5f9959-d5b5-47d4-bfa0-689a4f70bf12", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-291790058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c08a602a8fe4b4396543ac75ac40e7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59c9ae48-dc", "ovs_interfaceid": "59c9ae48-dc88-4de9-ba91-f62a004a177c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 896.326261] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:66:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ff90ec9-3c7e-4e76-b409-fcf37fc588d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '59c9ae48-dc88-4de9-ba91-f62a004a177c', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 896.336355] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Creating folder: Project (5c08a602a8fe4b4396543ac75ac40e7b). Parent ref: group-v489562. 
{{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 896.336355] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ae44b4bb-13fe-46bb-a1dc-15e12edad9a0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.353023] env[62522]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 896.353023] env[62522]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62522) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 896.353023] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Folder already exists: Project (5c08a602a8fe4b4396543ac75ac40e7b). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 896.353023] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Creating folder: Instances. Parent ref: group-v489680. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 896.353023] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a3e9b7f3-b0c3-4fd7-b172-7ea21c773972 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.365023] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Created folder: Instances in parent group-v489680. [ 896.365023] env[62522]: DEBUG oslo.service.loopingcall [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 896.365023] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 896.365023] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aa43ca69-fe65-4d14-89d7-d9f426ff37a6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.388127] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 896.388127] env[62522]: value = "task-2415641" [ 896.388127] env[62522]: _type = "Task" [ 896.388127] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.397236] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415641, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.606632] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415638, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066999} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.606987] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 896.608155] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9a70ed4-69a0-4825-a5a8-e46788d8f56c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.638064] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] fe1f5581-0dec-41e5-a450-c3de5a573602/fe1f5581-0dec-41e5-a450-c3de5a573602.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 896.638830] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ce550d92-9273-4fb5-a268-a52c3b663b30 tempest-ServersNegativeTestMultiTenantJSON-936850785 tempest-ServersNegativeTestMultiTenantJSON-936850785-project-member] Lock "7a086314-3e49-48e9-82c9-cead8ecb19d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 55.143s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.639997] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-60e682a6-9963-4e4a-9c64-99377384bd3a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.666646] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 896.666646] env[62522]: value = "task-2415642" [ 896.666646] env[62522]: _type = "Task" [ 896.666646] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.671100] env[62522]: DEBUG nova.network.neutron [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Successfully updated port: cecb41e7-0c40-40fd-b130-fc0afe3fba0d {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 896.679480] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415642, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.781557] env[62522]: DEBUG nova.compute.manager [req-d6f0e5bf-bf38-4416-a143-87fc900081de req-636d23d7-12ea-45d2-95b4-2a2631503180 service nova] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Received event network-changed-59c9ae48-dc88-4de9-ba91-f62a004a177c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 896.781557] env[62522]: DEBUG nova.compute.manager [req-d6f0e5bf-bf38-4416-a143-87fc900081de req-636d23d7-12ea-45d2-95b4-2a2631503180 service nova] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Refreshing instance network info cache due to event network-changed-59c9ae48-dc88-4de9-ba91-f62a004a177c. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 896.781557] env[62522]: DEBUG oslo_concurrency.lockutils [req-d6f0e5bf-bf38-4416-a143-87fc900081de req-636d23d7-12ea-45d2-95b4-2a2631503180 service nova] Acquiring lock "refresh_cache-35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.781557] env[62522]: DEBUG oslo_concurrency.lockutils [req-d6f0e5bf-bf38-4416-a143-87fc900081de req-636d23d7-12ea-45d2-95b4-2a2631503180 service nova] Acquired lock "refresh_cache-35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.781557] env[62522]: DEBUG nova.network.neutron [req-d6f0e5bf-bf38-4416-a143-87fc900081de req-636d23d7-12ea-45d2-95b4-2a2631503180 service nova] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Refreshing network info cache for port 59c9ae48-dc88-4de9-ba91-f62a004a177c {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 896.832733] env[62522]: DEBUG nova.compute.manager [req-64ed8133-34e6-40b0-88cb-f62dd2765781 req-5bc04dce-e0ef-4abb-ad5a-3b77da96be8e service nova] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Received event network-vif-plugged-cecb41e7-0c40-40fd-b130-fc0afe3fba0d {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 896.832950] env[62522]: DEBUG oslo_concurrency.lockutils [req-64ed8133-34e6-40b0-88cb-f62dd2765781 req-5bc04dce-e0ef-4abb-ad5a-3b77da96be8e service nova] Acquiring lock "845f99b8-4a9d-4fbe-89e1-825a5ddd01f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.833250] env[62522]: DEBUG oslo_concurrency.lockutils [req-64ed8133-34e6-40b0-88cb-f62dd2765781 req-5bc04dce-e0ef-4abb-ad5a-3b77da96be8e service nova] Lock "845f99b8-4a9d-4fbe-89e1-825a5ddd01f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.833426] env[62522]: DEBUG oslo_concurrency.lockutils [req-64ed8133-34e6-40b0-88cb-f62dd2765781 req-5bc04dce-e0ef-4abb-ad5a-3b77da96be8e service nova] Lock "845f99b8-4a9d-4fbe-89e1-825a5ddd01f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.833721] env[62522]: DEBUG nova.compute.manager [req-64ed8133-34e6-40b0-88cb-f62dd2765781 
req-5bc04dce-e0ef-4abb-ad5a-3b77da96be8e service nova] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] No waiting events found dispatching network-vif-plugged-cecb41e7-0c40-40fd-b130-fc0afe3fba0d {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 896.833781] env[62522]: WARNING nova.compute.manager [req-64ed8133-34e6-40b0-88cb-f62dd2765781 req-5bc04dce-e0ef-4abb-ad5a-3b77da96be8e service nova] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Received unexpected event network-vif-plugged-cecb41e7-0c40-40fd-b130-fc0afe3fba0d for instance with vm_state building and task_state spawning. [ 896.898026] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415641, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.151690] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Acquiring lock "e60d5286-04dd-42bb-ae50-26b0a763d2bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.151690] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Lock "e60d5286-04dd-42bb-ae50-26b0a763d2bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.175818] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "refresh_cache-845f99b8-4a9d-4fbe-89e1-825a5ddd01f2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.175903] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquired lock "refresh_cache-845f99b8-4a9d-4fbe-89e1-825a5ddd01f2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.176157] env[62522]: DEBUG nova.network.neutron [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 897.181362] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415642, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.406544] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415641, 'name': CreateVM_Task, 'duration_secs': 0.781116} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.406709] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 897.407390] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sda', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489690', 'volume_id': '5a72a163-f70c-478d-aff3-2a748c2d25d5', 'name': 'volume-5a72a163-f70c-478d-aff3-2a748c2d25d5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6', 'attached_at': '', 'detached_at': '', 'volume_id': '5a72a163-f70c-478d-aff3-2a748c2d25d5', 'serial': '5a72a163-f70c-478d-aff3-2a748c2d25d5'}, 'attachment_id': '644ee767-6eaf-484a-bb19-b359b10aee48', 'delete_on_termination': True, 'guest_format': None, 'device_type': None, 'boot_index': 0, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=62522) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 897.407591] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Root volume attach. Driver type: vmdk {{(pid=62522) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 897.410165] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82aceb7-8e34-4427-8c4c-211898eb61bb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.426698] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-729fc8f7-2d16-4c50-b6b2-ce33f86683e7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.432907] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870ae9be-31be-428f-88ab-b5e8e14ede7c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.441912] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-afc8dd1f-c960-478c-82d4-4410d0376810 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.452582] env[62522]: DEBUG oslo_vmware.api [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Waiting for the task: (returnval){ [ 897.452582] env[62522]: value = "task-2415643" [ 897.452582] env[62522]: _type = "Task" [ 897.452582] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.459651] env[62522]: DEBUG oslo_vmware.api [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2415643, 'name': RelocateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.643089] env[62522]: DEBUG nova.network.neutron [req-d6f0e5bf-bf38-4416-a143-87fc900081de req-636d23d7-12ea-45d2-95b4-2a2631503180 service nova] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Updated VIF entry in instance network info cache for port 59c9ae48-dc88-4de9-ba91-f62a004a177c. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 897.643263] env[62522]: DEBUG nova.network.neutron [req-d6f0e5bf-bf38-4416-a143-87fc900081de req-636d23d7-12ea-45d2-95b4-2a2631503180 service nova] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Updating instance_info_cache with network_info: [{"id": "59c9ae48-dc88-4de9-ba91-f62a004a177c", "address": "fa:16:3e:49:66:a2", "network": {"id": "fe5f9959-d5b5-47d4-bfa0-689a4f70bf12", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-291790058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c08a602a8fe4b4396543ac75ac40e7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59c9ae48-dc", "ovs_interfaceid": "59c9ae48-dc88-4de9-ba91-f62a004a177c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.656629] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf592b2-032f-4172-83e2-44bb208bc076 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.664906] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e23ca1-b309-4c48-a14c-4ddcf7a15be3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.707324] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf9fd0ec-72c8-43b9-96a0-ad33ef2efd5b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.710198] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415642, 'name': ReconfigVM_Task, 'duration_secs': 0.692178} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.710866] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Reconfigured VM instance instance-0000003b to attach disk [datastore2] fe1f5581-0dec-41e5-a450-c3de5a573602/fe1f5581-0dec-41e5-a450-c3de5a573602.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 897.712783] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b6c88fce-0b5b-40f7-bca3-de17ffa5db01 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.719142] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c044b5-e4e9-4552-afdb-0f9cef756a5d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.725137] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 897.725137] env[62522]: value = "task-2415644" [ 897.725137] env[62522]: _type = "Task" [ 897.725137] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.737705] env[62522]: DEBUG nova.compute.provider_tree [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 897.746791] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415644, 'name': Rename_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.754391] env[62522]: DEBUG nova.compute.manager [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 897.759375] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1689440e-09d1-4269-92e1-d3c0a3c4f2f0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.762350] env[62522]: DEBUG nova.network.neutron [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 897.965949] env[62522]: DEBUG oslo_vmware.api [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2415643, 'name': RelocateVM_Task} progress is 42%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.054922] env[62522]: DEBUG nova.network.neutron [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Updating instance_info_cache with network_info: [{"id": "cecb41e7-0c40-40fd-b130-fc0afe3fba0d", "address": "fa:16:3e:d5:32:20", "network": {"id": "d6a06fb0-929f-44b6-93c4-698be8498194", "bridge": "br-int", "label": "tempest-ImagesTestJSON-272550236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61314d3f0b9e4c368312e714a953e549", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcecb41e7-0c", "ovs_interfaceid": "cecb41e7-0c40-40fd-b130-fc0afe3fba0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.148757] env[62522]: DEBUG oslo_concurrency.lockutils [req-d6f0e5bf-bf38-4416-a143-87fc900081de req-636d23d7-12ea-45d2-95b4-2a2631503180 service nova] Releasing lock "refresh_cache-35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.243619] env[62522]: DEBUG nova.scheduler.client.report [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 898.247201] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415644, 'name': Rename_Task, 'duration_secs': 0.17118} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.247569] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 898.247927] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a56d6b68-7db4-406f-bc7b-48f23ed216b5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.266179] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 898.266179] env[62522]: value = "task-2415645" [ 898.266179] env[62522]: _type = "Task" [ 898.266179] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.273104] env[62522]: INFO nova.compute.manager [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] instance snapshotting [ 898.276329] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-180e7f3c-cce5-485e-bbee-a68fb061f11d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.282957] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415645, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.303735] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8f5809-5d6b-4abc-a150-1e4bbd365e42 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.467214] env[62522]: DEBUG oslo_vmware.api [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2415643, 'name': RelocateVM_Task} progress is 54%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.557734] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Releasing lock "refresh_cache-845f99b8-4a9d-4fbe-89e1-825a5ddd01f2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.559421] env[62522]: DEBUG nova.compute.manager [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Instance network_info: |[{"id": "cecb41e7-0c40-40fd-b130-fc0afe3fba0d", "address": "fa:16:3e:d5:32:20", "network": {"id": "d6a06fb0-929f-44b6-93c4-698be8498194", "bridge": "br-int", "label": "tempest-ImagesTestJSON-272550236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61314d3f0b9e4c368312e714a953e549", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcecb41e7-0c", "ovs_interfaceid": "cecb41e7-0c40-40fd-b130-fc0afe3fba0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 898.559421] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:32:20', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '399f3826-705c-45f7-9fe0-3a08a945151a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cecb41e7-0c40-40fd-b130-fc0afe3fba0d', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 898.567446] env[62522]: DEBUG oslo.service.loopingcall [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 898.567862] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 898.568090] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0e8fbbad-6add-42df-84a4-e605680f4fef {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.590453] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 898.590453] env[62522]: value = "task-2415646" [ 898.590453] env[62522]: _type = "Task" [ 898.590453] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.600664] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415646, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.748585] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.671s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.749160] env[62522]: DEBUG nova.compute.manager [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 898.752643] env[62522]: DEBUG oslo_concurrency.lockutils [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 48.712s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 898.753058] env[62522]: DEBUG nova.objects.instance [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lazy-loading 'resources' on Instance uuid 0d36b844-554e-46e7-9cf9-ef04b67e8898 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 898.788188] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415645, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.818160] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Creating Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 898.818160] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-dab2edc4-1b6b-4a28-bc27-a804aae209e5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.828026] env[62522]: DEBUG oslo_vmware.api [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 898.828026] env[62522]: value = "task-2415647" [ 898.828026] env[62522]: _type = "Task" [ 898.828026] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.837155] env[62522]: DEBUG oslo_vmware.api [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415647, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.865471] env[62522]: DEBUG nova.compute.manager [req-5df93e71-8b4b-488d-8df3-457a54f2d9b9 req-506aece4-80fc-4ee1-b685-581ac94e9d79 service nova] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Received event network-changed-cecb41e7-0c40-40fd-b130-fc0afe3fba0d {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 898.865471] env[62522]: DEBUG nova.compute.manager [req-5df93e71-8b4b-488d-8df3-457a54f2d9b9 req-506aece4-80fc-4ee1-b685-581ac94e9d79 service nova] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Refreshing instance network info cache due to event network-changed-cecb41e7-0c40-40fd-b130-fc0afe3fba0d. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 898.865471] env[62522]: DEBUG oslo_concurrency.lockutils [req-5df93e71-8b4b-488d-8df3-457a54f2d9b9 req-506aece4-80fc-4ee1-b685-581ac94e9d79 service nova] Acquiring lock "refresh_cache-845f99b8-4a9d-4fbe-89e1-825a5ddd01f2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.865471] env[62522]: DEBUG oslo_concurrency.lockutils [req-5df93e71-8b4b-488d-8df3-457a54f2d9b9 req-506aece4-80fc-4ee1-b685-581ac94e9d79 service nova] Acquired lock "refresh_cache-845f99b8-4a9d-4fbe-89e1-825a5ddd01f2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.865471] env[62522]: DEBUG nova.network.neutron [req-5df93e71-8b4b-488d-8df3-457a54f2d9b9 req-506aece4-80fc-4ee1-b685-581ac94e9d79 service nova] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Refreshing network info cache for port cecb41e7-0c40-40fd-b130-fc0afe3fba0d {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 898.966963] env[62522]: DEBUG oslo_vmware.api [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2415643, 'name': RelocateVM_Task} progress is 67%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.104027] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415646, 'name': CreateVM_Task, 'duration_secs': 0.494304} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.104027] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 899.104027] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.104027] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.104295] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 899.104407] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97095454-863d-454a-9f06-8e49f37311ea {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.111680] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 
tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 899.111680] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c5b783-bf75-93e8-aaa0-be21ce7f22ea" [ 899.111680] env[62522]: _type = "Task" [ 899.111680] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.122627] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c5b783-bf75-93e8-aaa0-be21ce7f22ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.259747] env[62522]: DEBUG nova.compute.utils [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 899.261609] env[62522]: DEBUG nova.compute.manager [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 899.261818] env[62522]: DEBUG nova.network.neutron [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 899.283824] env[62522]: DEBUG oslo_vmware.api [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415645, 'name': PowerOnVM_Task, 'duration_secs': 0.720681} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.284200] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 899.284450] env[62522]: DEBUG nova.compute.manager [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 899.285490] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da88710b-0ec8-43ac-a495-cf3dedf550c5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.326135] env[62522]: DEBUG nova.policy [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8eff9205ccb14bc89c4b1be13efd4a24', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce263d67988b4448b181b122b9270155', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 899.339905] env[62522]: DEBUG oslo_vmware.api [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415647, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.468866] env[62522]: DEBUG oslo_vmware.api [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2415643, 'name': RelocateVM_Task} progress is 81%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.626204] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c5b783-bf75-93e8-aaa0-be21ce7f22ea, 'name': SearchDatastore_Task, 'duration_secs': 0.021614} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.629388] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.629745] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 899.630103] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.630311] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.630571] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 899.632051] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e87cbfac-4c07-4468-aa76-ae791e62f400 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.647838] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 899.647838] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 899.648692] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e162683b-fde0-4b4a-9be5-06d312ad5f55 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.658239] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 899.658239] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522d2513-0cce-3f91-c2cc-3607aeb4a2fe" [ 899.658239] env[62522]: _type = "Task" [ 899.658239] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.670087] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522d2513-0cce-3f91-c2cc-3607aeb4a2fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.764276] env[62522]: DEBUG nova.network.neutron [req-5df93e71-8b4b-488d-8df3-457a54f2d9b9 req-506aece4-80fc-4ee1-b685-581ac94e9d79 service nova] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Updated VIF entry in instance network info cache for port cecb41e7-0c40-40fd-b130-fc0afe3fba0d. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 899.765217] env[62522]: DEBUG nova.network.neutron [req-5df93e71-8b4b-488d-8df3-457a54f2d9b9 req-506aece4-80fc-4ee1-b685-581ac94e9d79 service nova] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Updating instance_info_cache with network_info: [{"id": "cecb41e7-0c40-40fd-b130-fc0afe3fba0d", "address": "fa:16:3e:d5:32:20", "network": {"id": "d6a06fb0-929f-44b6-93c4-698be8498194", "bridge": "br-int", "label": "tempest-ImagesTestJSON-272550236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61314d3f0b9e4c368312e714a953e549", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcecb41e7-0c", "ovs_interfaceid": "cecb41e7-0c40-40fd-b130-fc0afe3fba0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.771814] env[62522]: DEBUG nova.compute.manager [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 899.818456] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 899.843433] env[62522]: DEBUG oslo_vmware.api [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415647, 'name': CreateSnapshot_Task, 'duration_secs': 0.847708} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.844090] env[62522]: DEBUG nova.network.neutron [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Successfully created port: 19506fca-4c28-41e4-b1fb-d6386948229a {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 899.846804] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Created Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 899.847669] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed227da-b322-4467-8534-d942311ca1fa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.878812] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dc130ac-6d6e-4aaf-a55a-c9ec348a0b30 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.889198] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d51f86e-8f30-4fb8-9f06-e7f68eaaabed {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.927740] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec6dd8f-52d2-4839-97f2-519aeef8f8d6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.935760] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c110e6f0-c297-434c-8cbd-f7d5898cea83 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.951659] env[62522]: DEBUG nova.compute.provider_tree [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 899.964115] env[62522]: DEBUG oslo_vmware.api [None req-e78a0426-aacc-4acf-8412-14c948fb803e 
tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2415643, 'name': RelocateVM_Task} progress is 95%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.167487] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522d2513-0cce-3f91-c2cc-3607aeb4a2fe, 'name': SearchDatastore_Task, 'duration_secs': 0.020846} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.168298] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3b04219-eef5-455b-b319-52a1894742c9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.173423] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 900.173423] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ee1c26-ae53-5db9-6630-9918eda4a72f" [ 900.173423] env[62522]: _type = "Task" [ 900.173423] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.181123] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ee1c26-ae53-5db9-6630-9918eda4a72f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.268759] env[62522]: DEBUG oslo_concurrency.lockutils [req-5df93e71-8b4b-488d-8df3-457a54f2d9b9 req-506aece4-80fc-4ee1-b685-581ac94e9d79 service nova] Releasing lock "refresh_cache-845f99b8-4a9d-4fbe-89e1-825a5ddd01f2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.374641] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Creating linked-clone VM from snapshot {{(pid=62522) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 900.374955] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6436de9b-32ec-425e-86ad-b0f98f52bd5a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.384342] env[62522]: DEBUG oslo_vmware.api [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 900.384342] env[62522]: value = "task-2415648" [ 900.384342] env[62522]: _type = "Task" [ 900.384342] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.394693] env[62522]: DEBUG oslo_vmware.api [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415648, 'name': CloneVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.456188] env[62522]: DEBUG nova.scheduler.client.report [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 900.473489] env[62522]: DEBUG oslo_vmware.api [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2415643, 'name': RelocateVM_Task} progress is 98%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.686219] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ee1c26-ae53-5db9-6630-9918eda4a72f, 'name': SearchDatastore_Task, 'duration_secs': 0.03529} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.686523] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.686815] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2/845f99b8-4a9d-4fbe-89e1-825a5ddd01f2.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 900.687145] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bf91111e-16a3-42d4-8c0d-ee16313e5d78 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.694273] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 900.694273] env[62522]: value = "task-2415649" [ 900.694273] env[62522]: _type = "Task" [ 900.694273] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.703983] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415649, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.782719] env[62522]: DEBUG nova.compute.manager [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 900.804389] env[62522]: DEBUG nova.virt.hardware [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 900.804658] env[62522]: DEBUG nova.virt.hardware [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 900.804793] env[62522]: DEBUG nova.virt.hardware [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 900.804966] env[62522]: DEBUG nova.virt.hardware [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 900.805118] env[62522]: DEBUG nova.virt.hardware [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 900.805269] env[62522]: DEBUG nova.virt.hardware [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 900.805479] env[62522]: DEBUG nova.virt.hardware [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 900.805635] env[62522]: DEBUG nova.virt.hardware [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 900.805799] env[62522]: DEBUG 
nova.virt.hardware [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 900.805960] env[62522]: DEBUG nova.virt.hardware [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 900.806158] env[62522]: DEBUG nova.virt.hardware [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 900.807042] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9736c91b-0a4a-4102-92f6-fbf29461fe68 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.816773] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19eaa78c-c8b5-41f3-99ef-9f322e24f2c7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.894712] env[62522]: DEBUG oslo_vmware.api [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415648, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.970639] env[62522]: DEBUG oslo_concurrency.lockutils [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.218s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.972563] env[62522]: DEBUG oslo_vmware.api [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2415643, 'name': RelocateVM_Task} progress is 98%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.973318] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 48.033s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.973318] env[62522]: DEBUG nova.objects.instance [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Lazy-loading 'resources' on Instance uuid 41a980df-88a9-4f9b-b34b-905b226c0675 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 900.997249] env[62522]: INFO nova.scheduler.client.report [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Deleted allocations for instance 0d36b844-554e-46e7-9cf9-ef04b67e8898 [ 901.049335] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "fe1f5581-0dec-41e5-a450-c3de5a573602" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.049607] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "fe1f5581-0dec-41e5-a450-c3de5a573602" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.049803] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "fe1f5581-0dec-41e5-a450-c3de5a573602-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.050056] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "fe1f5581-0dec-41e5-a450-c3de5a573602-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.050261] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "fe1f5581-0dec-41e5-a450-c3de5a573602-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.052676] env[62522]: INFO 
nova.compute.manager [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Terminating instance [ 901.203892] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415649, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.398054] env[62522]: DEBUG oslo_vmware.api [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415648, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.471257] env[62522]: DEBUG oslo_vmware.api [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2415643, 'name': RelocateVM_Task} progress is 98%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.513192] env[62522]: DEBUG oslo_concurrency.lockutils [None req-26a01d8d-fe79-4105-b639-949fad03b8f9 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "0d36b844-554e-46e7-9cf9-ef04b67e8898" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 55.031s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.516665] env[62522]: DEBUG nova.compute.manager [req-c3eae2f6-4ff3-4e09-be96-ac13e986bb4c req-1905c767-6aa5-429f-bde4-11c205f40bce service nova] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Received event network-vif-plugged-19506fca-4c28-41e4-b1fb-d6386948229a {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 901.516939] env[62522]: DEBUG oslo_concurrency.lockutils [req-c3eae2f6-4ff3-4e09-be96-ac13e986bb4c req-1905c767-6aa5-429f-bde4-11c205f40bce service nova] Acquiring lock "043a0a1b-268c-4caa-b1f7-cc7d70c3b314-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.517208] env[62522]: DEBUG oslo_concurrency.lockutils [req-c3eae2f6-4ff3-4e09-be96-ac13e986bb4c req-1905c767-6aa5-429f-bde4-11c205f40bce service nova] Lock "043a0a1b-268c-4caa-b1f7-cc7d70c3b314-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.517505] env[62522]: DEBUG oslo_concurrency.lockutils [req-c3eae2f6-4ff3-4e09-be96-ac13e986bb4c req-1905c767-6aa5-429f-bde4-11c205f40bce service nova] Lock "043a0a1b-268c-4caa-b1f7-cc7d70c3b314-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.517705] env[62522]: DEBUG nova.compute.manager [req-c3eae2f6-4ff3-4e09-be96-ac13e986bb4c req-1905c767-6aa5-429f-bde4-11c205f40bce service nova] [instance: 
043a0a1b-268c-4caa-b1f7-cc7d70c3b314] No waiting events found dispatching network-vif-plugged-19506fca-4c28-41e4-b1fb-d6386948229a {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 901.517882] env[62522]: WARNING nova.compute.manager [req-c3eae2f6-4ff3-4e09-be96-ac13e986bb4c req-1905c767-6aa5-429f-bde4-11c205f40bce service nova] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Received unexpected event network-vif-plugged-19506fca-4c28-41e4-b1fb-d6386948229a for instance with vm_state building and task_state spawning. [ 901.557508] env[62522]: DEBUG nova.compute.manager [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 901.557768] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 901.558817] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82531705-f663-47c6-8d73-a98fc641f74c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.568205] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 901.568547] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-15d07371-90a5-4fe5-8489-8b4789c4acd9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.580776] env[62522]: DEBUG oslo_vmware.api [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 901.580776] env[62522]: value = "task-2415650" [ 901.580776] env[62522]: _type = "Task" [ 901.580776] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.592806] env[62522]: DEBUG oslo_vmware.api [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415650, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.686705] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "917469c5-20be-4814-814f-a042415be021" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.687051] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "917469c5-20be-4814-814f-a042415be021" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.451876] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "d30397b4-c617-4717-b624-ad1b06331bea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.452198] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "d30397b4-c617-4717-b624-ad1b06331bea" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.452301] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "d30397b4-c617-4717-b624-ad1b06331bea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.452495] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "d30397b4-c617-4717-b624-ad1b06331bea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.452666] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "d30397b4-c617-4717-b624-ad1b06331bea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.454502] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 
tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415649, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.454692] env[62522]: WARNING oslo_vmware.common.loopingcall [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] task run outlasted interval by 0.258667 sec [ 902.455514] env[62522]: DEBUG nova.network.neutron [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Successfully updated port: 19506fca-4c28-41e4-b1fb-d6386948229a {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 902.456619] env[62522]: INFO nova.compute.manager [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Terminating instance [ 902.485998] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415649, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.023671} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.486267] env[62522]: DEBUG oslo_vmware.api [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415648, 'name': CloneVM_Task, 'duration_secs': 1.448779} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.486469] env[62522]: DEBUG oslo_vmware.api [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415650, 'name': PowerOffVM_Task, 'duration_secs': 0.387364} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.487124] env[62522]: DEBUG oslo_vmware.api [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2415643, 'name': RelocateVM_Task} progress is 98%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.489454] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2/845f99b8-4a9d-4fbe-89e1-825a5ddd01f2.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 902.489668] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 902.489919] env[62522]: INFO nova.virt.vmwareapi.vmops [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Created linked-clone VM from snapshot [ 902.490209] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 902.490370] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 902.490947] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a82e0c15-b0d4-41a5-a171-525febd40292 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.493124] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37eb8335-9049-4d9d-ab01-77bbb1cb402b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.495611] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5d8169cd-ed59-4a37-9fb1-e777adc1a9bd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.502152] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Uploading image aacd6ff3-7a61-4369-ac92-96e059ef8864 {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 902.508817] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 902.508817] env[62522]: value = "task-2415651" [ 902.508817] env[62522]: _type = "Task" [ 902.508817] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.517565] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415651, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.529409] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 902.529409] env[62522]: value = "vm-489737" [ 902.529409] env[62522]: _type = "VirtualMachine" [ 902.529409] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 902.529612] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d15ed66c-2237-45d5-b8e7-4bc622a1a710 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.535413] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lease: (returnval){ [ 902.535413] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52996d83-dbf9-fae7-565e-9b7f1f16b020" [ 902.535413] env[62522]: _type = "HttpNfcLease" [ 902.535413] env[62522]: } obtained for exporting VM: (result){ [ 902.535413] env[62522]: value = "vm-489737" [ 902.535413] env[62522]: _type = "VirtualMachine" [ 902.535413] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 902.535744] env[62522]: DEBUG oslo_vmware.api [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the lease: (returnval){ [ 902.535744] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52996d83-dbf9-fae7-565e-9b7f1f16b020" [ 902.535744] env[62522]: _type = "HttpNfcLease" [ 902.535744] env[62522]: } to be ready. {{(pid=62522) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 902.541812] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 902.541812] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52996d83-dbf9-fae7-565e-9b7f1f16b020" [ 902.541812] env[62522]: _type = "HttpNfcLease" [ 902.541812] env[62522]: } is initializing. 
{{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 902.559633] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 902.559901] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 902.560148] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Deleting the datastore file [datastore2] fe1f5581-0dec-41e5-a450-c3de5a573602 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 902.560406] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2f58cf01-84ce-4353-a792-7cbbf595fd04 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.571576] env[62522]: DEBUG oslo_vmware.api [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 902.571576] env[62522]: value = "task-2415654" [ 902.571576] env[62522]: _type = "Task" [ 902.571576] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.575720] env[62522]: DEBUG oslo_vmware.api [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415654, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.733227] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37049863-43cc-442c-97d7-8560470cb07d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.741368] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d14016ca-70e8-44c8-a75a-7793ba34e91d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.773801] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcfffd1d-b789-4b6f-bfab-cccda16fe4f8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.781206] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd750908-176c-4e5d-85b8-1806ad89da2a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.794130] env[62522]: DEBUG nova.compute.provider_tree [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 902.963060] env[62522]: DEBUG oslo_vmware.api [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2415643, 'name': RelocateVM_Task} progress is 98%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.968665] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquiring lock "refresh_cache-043a0a1b-268c-4caa-b1f7-cc7d70c3b314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.968842] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquired lock "refresh_cache-043a0a1b-268c-4caa-b1f7-cc7d70c3b314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.969017] env[62522]: DEBUG nova.network.neutron [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 902.972062] env[62522]: DEBUG nova.compute.manager [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 902.972270] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 902.973015] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a05e380-ffc3-45c8-94f3-ca2c4a4b715b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.980390] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 902.980633] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cea5ed04-86fe-4119-8ca8-3ea401934887 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.986709] env[62522]: DEBUG oslo_vmware.api [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 902.986709] env[62522]: value = "task-2415655" [ 902.986709] env[62522]: _type = "Task" [ 902.986709] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.994874] env[62522]: DEBUG oslo_vmware.api [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415655, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.019067] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415651, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06905} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.019424] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 903.020515] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a831b5c-d6f2-4abc-8c49-33503d5398b9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.044103] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2/845f99b8-4a9d-4fbe-89e1-825a5ddd01f2.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 903.044448] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a03643cf-1a3e-48cc-9228-65bfdb86a537 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.066990] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 903.066990] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52996d83-dbf9-fae7-565e-9b7f1f16b020" [ 903.066990] env[62522]: _type = "HttpNfcLease" [ 903.066990] env[62522]: } is ready. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 903.066990] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 903.066990] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52996d83-dbf9-fae7-565e-9b7f1f16b020" [ 903.066990] env[62522]: _type = "HttpNfcLease" [ 903.066990] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 903.068374] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ca3255-f7ef-4788-8318-b9ecbecb48e3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.070797] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 903.070797] env[62522]: value = "task-2415656" [ 903.070797] env[62522]: _type = "Task" [ 903.070797] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.079766] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520136de-c5df-2156-59fc-709c28e655b6/disk-0.vmdk from lease info. 
{{(pid=62522) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 903.079939] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520136de-c5df-2156-59fc-709c28e655b6/disk-0.vmdk for reading. {{(pid=62522) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 903.141506] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415656, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.141782] env[62522]: DEBUG oslo_vmware.api [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415654, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139172} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.143200] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 903.143405] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 903.143628] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 903.143866] env[62522]: INFO nova.compute.manager [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Took 1.59 seconds to destroy the instance on the hypervisor. [ 903.143998] env[62522]: DEBUG oslo.service.loopingcall [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 903.144516] env[62522]: DEBUG nova.compute.manager [-] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 903.144615] env[62522]: DEBUG nova.network.neutron [-] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 903.181399] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-28eed36a-5c88-4462-aec0-a69e7d6e29e5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.301956] env[62522]: DEBUG nova.scheduler.client.report [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 903.470108] env[62522]: DEBUG oslo_vmware.api [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2415643, 'name': RelocateVM_Task, 'duration_secs': 5.776918} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.470108] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Volume attach. 
Driver type: vmdk {{(pid=62522) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 903.470108] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489690', 'volume_id': '5a72a163-f70c-478d-aff3-2a748c2d25d5', 'name': 'volume-5a72a163-f70c-478d-aff3-2a748c2d25d5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6', 'attached_at': '', 'detached_at': '', 'volume_id': '5a72a163-f70c-478d-aff3-2a748c2d25d5', 'serial': '5a72a163-f70c-478d-aff3-2a748c2d25d5'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 903.470871] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-936246fc-e768-4018-85e8-57cadccc10d1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.492781] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d980bb3-1b78-4742-a1bd-43a96bc1929b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.500458] env[62522]: DEBUG oslo_vmware.api [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415655, 'name': PowerOffVM_Task, 'duration_secs': 0.180735} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.509842] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 903.510078] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 903.519221] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Reconfiguring VM instance instance-0000003d to attach disk [datastore2] volume-5a72a163-f70c-478d-aff3-2a748c2d25d5/volume-5a72a163-f70c-478d-aff3-2a748c2d25d5.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 903.520191] env[62522]: DEBUG nova.network.neutron [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 903.522248] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3c879df-727d-4d9b-a83f-4f8df9cdff1b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.523851] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a702baf-5d18-4af1-9a4c-6d78a8db1fc0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.547788] env[62522]: DEBUG oslo_vmware.api [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Waiting for the task: (returnval){ [ 903.547788] env[62522]: value = "task-2415658" [ 903.547788] env[62522]: _type = "Task" [ 903.547788] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.557116] env[62522]: DEBUG oslo_vmware.api [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2415658, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.581201] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415656, 'name': ReconfigVM_Task, 'duration_secs': 0.24858} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.583603] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Reconfigured VM instance instance-0000003e to attach disk [datastore2] 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2/845f99b8-4a9d-4fbe-89e1-825a5ddd01f2.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 903.584366] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d3dc2fae-9afe-4487-8ab3-0534fb755a71 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.596723] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 903.596723] env[62522]: value = "task-2415659" [ 903.596723] env[62522]: _type = "Task" [ 903.596723] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.601475] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 903.601763] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 903.602018] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Deleting the datastore file [datastore1] d30397b4-c617-4717-b624-ad1b06331bea {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 903.602982] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-82285021-7f91-4942-ac5e-694cfa6f2d79 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.610267] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415659, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.612967] env[62522]: DEBUG nova.compute.manager [req-7287067c-fc49-4fbc-8605-b6461da6f1d6 req-c3771d56-c0ad-4bd8-9931-5eb6d90e0c1b service nova] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Received event network-changed-19506fca-4c28-41e4-b1fb-d6386948229a {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 903.613210] env[62522]: DEBUG nova.compute.manager [req-7287067c-fc49-4fbc-8605-b6461da6f1d6 req-c3771d56-c0ad-4bd8-9931-5eb6d90e0c1b service nova] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Refreshing instance network info cache due to event network-changed-19506fca-4c28-41e4-b1fb-d6386948229a. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 903.613425] env[62522]: DEBUG oslo_concurrency.lockutils [req-7287067c-fc49-4fbc-8605-b6461da6f1d6 req-c3771d56-c0ad-4bd8-9931-5eb6d90e0c1b service nova] Acquiring lock "refresh_cache-043a0a1b-268c-4caa-b1f7-cc7d70c3b314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 903.616647] env[62522]: DEBUG oslo_vmware.api [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for the task: (returnval){ [ 903.616647] env[62522]: value = "task-2415660" [ 903.616647] env[62522]: _type = "Task" [ 903.616647] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.625281] env[62522]: DEBUG oslo_vmware.api [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415660, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.708499] env[62522]: DEBUG nova.network.neutron [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Updating instance_info_cache with network_info: [{"id": "19506fca-4c28-41e4-b1fb-d6386948229a", "address": "fa:16:3e:c0:ff:47", "network": {"id": "6be1ee4a-ede6-491a-8cf1-6f237b02c850", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1134150376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce263d67988b4448b181b122b9270155", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f49a7d-c6e5-404f-b71a-91d8c070cd18", "external-id": "nsx-vlan-transportzone-120", "segmentation_id": 120, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19506fca-4c", "ovs_interfaceid": "19506fca-4c28-41e4-b1fb-d6386948229a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.802793] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.830s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.806088] env[62522]: DEBUG oslo_concurrency.lockutils [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 50.142s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.806088] env[62522]: DEBUG nova.objects.instance [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lazy-loading 'resources' on Instance uuid ed7220fa-fee9-4715-acbb-236682c6729e {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 903.827667] env[62522]: INFO nova.scheduler.client.report [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Deleted allocations for instance 41a980df-88a9-4f9b-b34b-905b226c0675 [ 
904.059726] env[62522]: DEBUG oslo_vmware.api [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2415658, 'name': ReconfigVM_Task, 'duration_secs': 0.30429} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.060138] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Reconfigured VM instance instance-0000003d to attach disk [datastore2] volume-5a72a163-f70c-478d-aff3-2a748c2d25d5/volume-5a72a163-f70c-478d-aff3-2a748c2d25d5.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 904.064947] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-16b77939-4b94-4ae8-86b9-ba4f46a22c8b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.080714] env[62522]: DEBUG oslo_vmware.api [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Waiting for the task: (returnval){ [ 904.080714] env[62522]: value = "task-2415661" [ 904.080714] env[62522]: _type = "Task" [ 904.080714] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.089295] env[62522]: DEBUG oslo_vmware.api [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2415661, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.089633] env[62522]: DEBUG nova.network.neutron [-] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.102973] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415659, 'name': Rename_Task, 'duration_secs': 0.145045} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.104535] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 904.104885] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9972303c-9da6-431a-89a4-e144376f178c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.112288] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 904.112288] env[62522]: value = "task-2415662" [ 904.112288] env[62522]: _type = "Task" [ 904.112288] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.124477] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415662, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.129815] env[62522]: DEBUG oslo_vmware.api [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Task: {'id': task-2415660, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144439} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.130579] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 904.130894] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 904.131241] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 904.131477] env[62522]: INFO nova.compute.manager [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Took 1.16 seconds to destroy the instance on the hypervisor. 
[ 904.131821] env[62522]: DEBUG oslo.service.loopingcall [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 904.132136] env[62522]: DEBUG nova.compute.manager [-] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 904.132279] env[62522]: DEBUG nova.network.neutron [-] [instance: d30397b4-c617-4717-b624-ad1b06331bea] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 904.211670] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Releasing lock "refresh_cache-043a0a1b-268c-4caa-b1f7-cc7d70c3b314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 904.212112] env[62522]: DEBUG nova.compute.manager [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Instance network_info: |[{"id": "19506fca-4c28-41e4-b1fb-d6386948229a", "address": "fa:16:3e:c0:ff:47", "network": {"id": "6be1ee4a-ede6-491a-8cf1-6f237b02c850", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1134150376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce263d67988b4448b181b122b9270155", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f49a7d-c6e5-404f-b71a-91d8c070cd18", "external-id": "nsx-vlan-transportzone-120", "segmentation_id": 120, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19506fca-4c", "ovs_interfaceid": "19506fca-4c28-41e4-b1fb-d6386948229a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 904.212504] env[62522]: DEBUG oslo_concurrency.lockutils [req-7287067c-fc49-4fbc-8605-b6461da6f1d6 req-c3771d56-c0ad-4bd8-9931-5eb6d90e0c1b service nova] Acquired lock "refresh_cache-043a0a1b-268c-4caa-b1f7-cc7d70c3b314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.212873] env[62522]: DEBUG nova.network.neutron [req-7287067c-fc49-4fbc-8605-b6461da6f1d6 req-c3771d56-c0ad-4bd8-9931-5eb6d90e0c1b service nova] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Refreshing network info cache for port 19506fca-4c28-41e4-b1fb-d6386948229a {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 904.214612] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None 
req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:ff:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f5f49a7d-c6e5-404f-b71a-91d8c070cd18', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '19506fca-4c28-41e4-b1fb-d6386948229a', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 904.227365] env[62522]: DEBUG oslo.service.loopingcall [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 904.231336] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 904.232379] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c4443e6f-b275-4a1d-a752-5e1890aed3c6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.255251] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 904.255251] env[62522]: value = "task-2415663" [ 904.255251] env[62522]: _type = "Task" [ 904.255251] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.264279] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415663, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.341161] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a07f6729-3cc2-48e2-925a-97b42cf56098 tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Lock "41a980df-88a9-4f9b-b34b-905b226c0675" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 54.836s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.346164] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35ae21a3-2d49-4ec1-ba16-7b7f28a54e3b tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Lock "41a980df-88a9-4f9b-b34b-905b226c0675" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 51.207s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.346760] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35ae21a3-2d49-4ec1-ba16-7b7f28a54e3b tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquiring lock "41a980df-88a9-4f9b-b34b-905b226c0675-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.348203] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35ae21a3-2d49-4ec1-ba16-7b7f28a54e3b tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Lock "41a980df-88a9-4f9b-b34b-905b226c0675-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.002s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.348414] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35ae21a3-2d49-4ec1-ba16-7b7f28a54e3b tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Lock "41a980df-88a9-4f9b-b34b-905b226c0675-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.352650] env[62522]: INFO nova.compute.manager [None req-35ae21a3-2d49-4ec1-ba16-7b7f28a54e3b tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Terminating instance [ 904.592894] env[62522]: INFO nova.compute.manager [-] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Took 1.45 seconds to deallocate network for instance. [ 904.593635] env[62522]: DEBUG oslo_vmware.api [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2415661, 'name': ReconfigVM_Task, 'duration_secs': 0.127602} completed successfully.
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.598227] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489690', 'volume_id': '5a72a163-f70c-478d-aff3-2a748c2d25d5', 'name': 'volume-5a72a163-f70c-478d-aff3-2a748c2d25d5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6', 'attached_at': '', 'detached_at': '', 'volume_id': '5a72a163-f70c-478d-aff3-2a748c2d25d5', 'serial': '5a72a163-f70c-478d-aff3-2a748c2d25d5'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 904.602253] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f5044df7-c3ff-46af-b811-dd06e84d39a6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.610202] env[62522]: DEBUG oslo_vmware.api [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Waiting for the task: (returnval){ [ 904.610202] env[62522]: value = "task-2415664" [ 904.610202] env[62522]: _type = "Task" [ 904.610202] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.626233] env[62522]: DEBUG oslo_vmware.api [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2415664, 'name': Rename_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.630720] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415662, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.694148] env[62522]: DEBUG nova.network.neutron [req-7287067c-fc49-4fbc-8605-b6461da6f1d6 req-c3771d56-c0ad-4bd8-9931-5eb6d90e0c1b service nova] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Updated VIF entry in instance network info cache for port 19506fca-4c28-41e4-b1fb-d6386948229a. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 904.694148] env[62522]: DEBUG nova.network.neutron [req-7287067c-fc49-4fbc-8605-b6461da6f1d6 req-c3771d56-c0ad-4bd8-9931-5eb6d90e0c1b service nova] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Updating instance_info_cache with network_info: [{"id": "19506fca-4c28-41e4-b1fb-d6386948229a", "address": "fa:16:3e:c0:ff:47", "network": {"id": "6be1ee4a-ede6-491a-8cf1-6f237b02c850", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1134150376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce263d67988b4448b181b122b9270155", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f49a7d-c6e5-404f-b71a-91d8c070cd18", "external-id": "nsx-vlan-transportzone-120", "segmentation_id": 120, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19506fca-4c", "ovs_interfaceid": "19506fca-4c28-41e4-b1fb-d6386948229a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.764223] env[62522]: DEBUG nova.compute.manager [req-0f96e79d-34a2-4ddd-887a-c99702b138b8 req-a92dd8d4-5c9f-4f59-a68a-a0a718219e54 service nova] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Received event network-vif-deleted-290fda08-0629-455f-b80b-237754fd93f2 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 904.764373] env[62522]: INFO nova.compute.manager [req-0f96e79d-34a2-4ddd-887a-c99702b138b8 req-a92dd8d4-5c9f-4f59-a68a-a0a718219e54 service nova] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Neutron deleted interface 290fda08-0629-455f-b80b-237754fd93f2; detaching it from the instance and deleting it from the info cache [ 904.764552] env[62522]: DEBUG nova.network.neutron [req-0f96e79d-34a2-4ddd-887a-c99702b138b8 req-a92dd8d4-5c9f-4f59-a68a-a0a718219e54 service nova] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.772930] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415663, 'name': CreateVM_Task, 'duration_secs': 0.471789} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.776278] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 904.778321] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.778463] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.779207] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 904.779945] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0e059b7-6772-4d00-ba49-6f6ea9a8ba32 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.785689] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for the task: (returnval){ [ 904.785689] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f41197-e106-0f2c-d7c9-5db1c3726620" [ 904.785689] env[62522]: _type = "Task" [ 904.785689] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.799804] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f41197-e106-0f2c-d7c9-5db1c3726620, 'name': SearchDatastore_Task, 'duration_secs': 0.010363} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.800243] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 904.800457] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 904.800823] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.801163] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.801383] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 904.801755] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5cb73fa-694d-4e46-aeeb-58a2d2cff5da {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.812277] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 904.816021] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 904.816021] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7083f071-0941-4cf8-901d-f2cc4450e818 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.821809] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for the task: (returnval){ [ 904.821809] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bc4eda-16e8-38b3-c296-6c4d1c956a10" [ 904.821809] env[62522]: _type = "Task" [ 904.821809] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.830419] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bc4eda-16e8-38b3-c296-6c4d1c956a10, 'name': SearchDatastore_Task, 'duration_secs': 0.009138} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.833412] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5a5547b-1708-4316-bb76-c82cab511664 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.838764] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for the task: (returnval){ [ 904.838764] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b11d04-996b-d283-0e48-85c412d02e7f" [ 904.838764] env[62522]: _type = "Task" [ 904.838764] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.846048] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b11d04-996b-d283-0e48-85c412d02e7f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.863421] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35ae21a3-2d49-4ec1-ba16-7b7f28a54e3b tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquiring lock "refresh_cache-41a980df-88a9-4f9b-b34b-905b226c0675" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.863706] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35ae21a3-2d49-4ec1-ba16-7b7f28a54e3b tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquired lock "refresh_cache-41a980df-88a9-4f9b-b34b-905b226c0675" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.864035] env[62522]: DEBUG nova.network.neutron [None req-35ae21a3-2d49-4ec1-ba16-7b7f28a54e3b tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 904.888903] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45cb1114-8d48-4407-af88-9892cb0d5e4f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.897804] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2aaeaf8-4171-4f01-8673-7d48d5f7e94e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.929058] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f94e0511-b2fc-409d-8641-08191bfbe0aa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.936976] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fba8bdad-3257-4b3b-be7e-9b83d58cc126 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.952828] env[62522]: DEBUG nova.compute.provider_tree [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 905.106337] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.124379] env[62522]: DEBUG oslo_vmware.api [None 
req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2415664, 'name': Rename_Task, 'duration_secs': 0.150119} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.129561] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 905.129561] env[62522]: DEBUG oslo_vmware.api [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415662, 'name': PowerOnVM_Task, 'duration_secs': 0.710647} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.129561] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8f57d42c-4e00-4c1e-bfc4-c8346be9092b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.129561] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 905.129797] env[62522]: INFO nova.compute.manager [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Took 9.28 seconds to spawn the instance on the hypervisor. [ 905.130014] env[62522]: DEBUG nova.compute.manager [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 905.130724] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-142aeb4e-757e-45c8-81d6-fc65a7f4d29f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.140608] env[62522]: DEBUG oslo_vmware.api [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Waiting for the task: (returnval){ [ 905.140608] env[62522]: value = "task-2415665" [ 905.140608] env[62522]: _type = "Task" [ 905.140608] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.149620] env[62522]: DEBUG oslo_vmware.api [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2415665, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.173322] env[62522]: DEBUG nova.network.neutron [-] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.197526] env[62522]: DEBUG oslo_concurrency.lockutils [req-7287067c-fc49-4fbc-8605-b6461da6f1d6 req-c3771d56-c0ad-4bd8-9931-5eb6d90e0c1b service nova] Releasing lock "refresh_cache-043a0a1b-268c-4caa-b1f7-cc7d70c3b314" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.267091] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ddda7b30-3977-4ccb-b9e6-c58f146d8ba3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.279624] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be84c9ce-c57b-46b4-9a25-552cc501735a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.315887] env[62522]: DEBUG nova.compute.manager [req-0f96e79d-34a2-4ddd-887a-c99702b138b8 req-a92dd8d4-5c9f-4f59-a68a-a0a718219e54 service nova] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Detach interface failed, port_id=290fda08-0629-455f-b80b-237754fd93f2, reason: Instance d30397b4-c617-4717-b624-ad1b06331bea could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 905.350033] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b11d04-996b-d283-0e48-85c412d02e7f, 'name': SearchDatastore_Task, 'duration_secs': 0.008388} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.350406] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.350797] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 043a0a1b-268c-4caa-b1f7-cc7d70c3b314/043a0a1b-268c-4caa-b1f7-cc7d70c3b314.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 905.351184] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8b8e7117-13ab-4bcd-8643-dad3697ed624 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.363595] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for the task: (returnval){ [ 905.363595] env[62522]: value = "task-2415666" [ 905.363595] env[62522]: _type = "Task" [ 905.363595] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.368033] env[62522]: DEBUG nova.compute.utils [None req-35ae21a3-2d49-4ec1-ba16-7b7f28a54e3b tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Can not refresh info_cache because instance was not found {{(pid=62522) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1055}} [ 905.377600] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415666, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.389541] env[62522]: DEBUG nova.network.neutron [None req-35ae21a3-2d49-4ec1-ba16-7b7f28a54e3b tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 905.484495] env[62522]: ERROR nova.scheduler.client.report [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [req-6673c45b-eda5-4781-9560-0bc3a4b8e026] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c7fa38b2-245d-4337-a012-22c1a01c0a72. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6673c45b-eda5-4781-9560-0bc3a4b8e026"}]} [ 905.502348] env[62522]: DEBUG nova.scheduler.client.report [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Refreshing inventories for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 905.521361] env[62522]: DEBUG nova.scheduler.client.report [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Updating ProviderTree inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 905.521645] env[62522]: DEBUG nova.compute.provider_tree [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 905.535587] env[62522]: DEBUG nova.scheduler.client.report [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Refreshing aggregate associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, aggregates: None {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 905.557959] env[62522]: DEBUG nova.scheduler.client.report [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Refreshing trait associations for resource provider 
c7fa38b2-245d-4337-a012-22c1a01c0a72, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 905.609113] env[62522]: DEBUG nova.network.neutron [None req-35ae21a3-2d49-4ec1-ba16-7b7f28a54e3b tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.659036] env[62522]: INFO nova.compute.manager [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Took 60.66 seconds to build instance. [ 905.666852] env[62522]: DEBUG oslo_vmware.api [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2415665, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.677884] env[62522]: INFO nova.compute.manager [-] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Took 1.55 seconds to deallocate network for instance. [ 905.859433] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquiring lock "c1fd078c-61d4-4c0f-8c49-0f56a926a087" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.859771] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Lock "c1fd078c-61d4-4c0f-8c49-0f56a926a087" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.860054] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquiring lock "c1fd078c-61d4-4c0f-8c49-0f56a926a087-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.860312] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Lock "c1fd078c-61d4-4c0f-8c49-0f56a926a087-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.860514] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886
tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Lock "c1fd078c-61d4-4c0f-8c49-0f56a926a087-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.867085] env[62522]: INFO nova.compute.manager [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Terminating instance [ 905.882770] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415666, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.886629] env[62522]: DEBUG nova.compute.manager [req-cb102055-64cb-415a-86fd-25700d74e451 req-683085e2-5eee-4170-90a5-d13c8997979c service nova] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Received event network-vif-deleted-15fdf0ad-85f9-4e6d-ace5-5a462f025e45 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 906.069909] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de4eafc-c079-4032-9434-88a066fa026b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.077261] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc815000-a6b3-47ed-9b81-d01a9dfda051 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.107839] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9b2edca-9fad-4007-aeca-5bda9709f9ef {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.112120] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35ae21a3-2d49-4ec1-ba16-7b7f28a54e3b tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Releasing lock "refresh_cache-41a980df-88a9-4f9b-b34b-905b226c0675" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.112622] env[62522]: DEBUG nova.compute.manager [None req-35ae21a3-2d49-4ec1-ba16-7b7f28a54e3b tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Start destroying the instance on the hypervisor.
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 906.112773] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-35ae21a3-2d49-4ec1-ba16-7b7f28a54e3b tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 906.114922] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e5e5f378-1aec-4be1-9ccb-3717e478b7cc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.117877] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acde87e9-db46-43de-a256-5e60dc1f88f0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.131373] env[62522]: DEBUG nova.compute.provider_tree [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 906.137063] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2494c7b5-6a32-4c62-8f33-7bd49fec7ee5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.159209] env[62522]: DEBUG oslo_vmware.api [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2415665, 'name': PowerOnVM_Task, 'duration_secs': 0.532298} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.172039] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 906.172323] env[62522]: INFO nova.compute.manager [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Took 10.73 seconds to spawn the instance on the hypervisor. 
[ 906.172592] env[62522]: DEBUG nova.compute.manager [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 906.173434] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c3618bed-7a15-440e-848e-13ec76d9f295 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "845f99b8-4a9d-4fbe-89e1-825a5ddd01f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 99.510s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.173741] env[62522]: WARNING nova.virt.vmwareapi.vmops [None req-35ae21a3-2d49-4ec1-ba16-7b7f28a54e3b tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 41a980df-88a9-4f9b-b34b-905b226c0675 could not be found. [ 906.173780] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-35ae21a3-2d49-4ec1-ba16-7b7f28a54e3b tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 906.173921] env[62522]: INFO nova.compute.manager [None req-35ae21a3-2d49-4ec1-ba16-7b7f28a54e3b tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Took 0.06 seconds to destroy the instance on the hypervisor. [ 906.174172] env[62522]: DEBUG oslo.service.loopingcall [None req-35ae21a3-2d49-4ec1-ba16-7b7f28a54e3b tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 906.174981] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a994f3-d636-414f-922f-45c8a255d1c6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.178503] env[62522]: DEBUG nova.compute.manager [-] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 906.178503] env[62522]: DEBUG nova.network.neutron [-] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 906.186697] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.195954] env[62522]: DEBUG nova.network.neutron [-] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 906.377088] env[62522]: DEBUG nova.compute.manager [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 906.377329] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 906.378166] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c02abb37-3715-44fc-b05e-0e1a5615de4b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.381759] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe53d02-8bfa-4dc7-b01a-410464d57af6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.389960] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415666, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519071} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.393598] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 043a0a1b-268c-4caa-b1f7-cc7d70c3b314/043a0a1b-268c-4caa-b1f7-cc7d70c3b314.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 906.393858] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 906.394213] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 906.394460] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d275b6cd-7b26-46c6-8e55-220a1ab4d74d tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Suspending the VM {{(pid=62522) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 906.394682] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-61170f34-bfd0-456c-a75d-d02ed4e9950b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.396679] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4699aac3-9f02-44d5-9ffc-49958228c40b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.398168] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-8ed014ad-b9a8-4ad8-95a9-ef136fdb62ff {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.407676] env[62522]: DEBUG oslo_vmware.api [None req-d275b6cd-7b26-46c6-8e55-220a1ab4d74d tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 906.407676] env[62522]: value = "task-2415669" [ 906.407676] env[62522]: _type = "Task" [ 906.407676] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.409274] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for the task: (returnval){ [ 906.409274] env[62522]: value = "task-2415667" [ 906.409274] env[62522]: _type = "Task" [ 906.409274] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.409504] env[62522]: DEBUG oslo_vmware.api [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Waiting for the task: (returnval){ [ 906.409504] env[62522]: value = "task-2415668" [ 906.409504] env[62522]: _type = "Task" [ 906.409504] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.424962] env[62522]: DEBUG oslo_vmware.api [None req-d275b6cd-7b26-46c6-8e55-220a1ab4d74d tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415669, 'name': SuspendVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.431473] env[62522]: DEBUG oslo_vmware.api [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415668, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.431758] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415667, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.635754] env[62522]: DEBUG nova.scheduler.client.report [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 906.681124] env[62522]: DEBUG nova.compute.manager [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 906.697682] env[62522]: INFO nova.compute.manager [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Took 63.11 seconds to build instance. 
[ 906.699145] env[62522]: DEBUG nova.network.neutron [-] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.924442] env[62522]: DEBUG oslo_vmware.api [None req-d275b6cd-7b26-46c6-8e55-220a1ab4d74d tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415669, 'name': SuspendVM_Task} progress is 54%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.930229] env[62522]: DEBUG oslo_vmware.api [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415668, 'name': PowerOffVM_Task, 'duration_secs': 0.466741} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.930475] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415667, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079597} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.930716] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 906.930883] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 906.931164] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 906.931434] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-74ae2bb5-a0a1-4150-bdb3-619d4c505608 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.933640] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-753443ef-f6e6-4307-9705-ace23189d562 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.957266] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 043a0a1b-268c-4caa-b1f7-cc7d70c3b314/043a0a1b-268c-4caa-b1f7-cc7d70c3b314.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 906.957680] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef42b1b7-0257-4fa8-b936-b1ab184c9c89 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.976972] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for the task: (returnval){ [ 906.976972] env[62522]: value = "task-2415671" [ 906.976972] env[62522]: _type = "Task" [ 906.976972] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.985453] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415671, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.992080] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 906.992363] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 906.992516] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Deleting the datastore file [datastore2] c1fd078c-61d4-4c0f-8c49-0f56a926a087 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 906.992784] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44c78509-704f-4a75-8f87-a7472be6132c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.999056] env[62522]: DEBUG oslo_vmware.api [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Waiting for the task: (returnval){ [ 906.999056] env[62522]: value = "task-2415672" [ 906.999056] env[62522]: _type = "Task" [ 906.999056] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.007981] env[62522]: DEBUG oslo_vmware.api [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415672, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.142205] env[62522]: DEBUG oslo_concurrency.lockutils [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.336s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.145139] env[62522]: DEBUG oslo_concurrency.lockutils [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.443s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.145446] env[62522]: DEBUG nova.objects.instance [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Lazy-loading 'resources' on Instance uuid 504396d8-077d-4563-91b5-a7a6259eea27 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 907.166279] env[62522]: INFO nova.scheduler.client.report [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Deleted allocations for instance ed7220fa-fee9-4715-acbb-236682c6729e [ 907.201986] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e78a0426-aacc-4acf-8412-14c948fb803e tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Lock "35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 101.604s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.202686] env[62522]: INFO nova.compute.manager [-] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Took 1.02 seconds to deallocate network for instance. [ 907.213658] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.424035] env[62522]: DEBUG oslo_vmware.api [None req-d275b6cd-7b26-46c6-8e55-220a1ab4d74d tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415669, 'name': SuspendVM_Task, 'duration_secs': 0.777628} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.424426] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d275b6cd-7b26-46c6-8e55-220a1ab4d74d tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Suspended the VM {{(pid=62522) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 907.424660] env[62522]: DEBUG nova.compute.manager [None req-d275b6cd-7b26-46c6-8e55-220a1ab4d74d tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 907.425510] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1f2e2b5-9972-4c3d-9687-c1aa94e4e129 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.487264] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415671, 'name': ReconfigVM_Task, 'duration_secs': 0.387429} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.487662] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 043a0a1b-268c-4caa-b1f7-cc7d70c3b314/043a0a1b-268c-4caa-b1f7-cc7d70c3b314.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 907.488384] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1bbb2e65-7f79-4401-9285-7e79ecd0c5e3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.495499] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for the task: (returnval){ [ 907.495499] env[62522]: value = "task-2415673" [ 907.495499] env[62522]: _type = "Task" [ 907.495499] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.506691] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415673, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.511839] env[62522]: DEBUG oslo_vmware.api [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Task: {'id': task-2415672, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148549} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.512223] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 907.512596] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 907.512884] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 907.513240] env[62522]: INFO nova.compute.manager [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Took 1.14 seconds to destroy the instance on the hypervisor. [ 907.513979] env[62522]: DEBUG oslo.service.loopingcall [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 907.513979] env[62522]: DEBUG nova.compute.manager [-] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 907.513979] env[62522]: DEBUG nova.network.neutron [-] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 907.678495] env[62522]: DEBUG oslo_concurrency.lockutils [None req-606922fd-eaca-4977-8967-01a88e796583 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "ed7220fa-fee9-4715-acbb-236682c6729e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 57.631s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.706869] env[62522]: DEBUG nova.compute.manager [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 907.713021] env[62522]: INFO nova.compute.manager [None req-35ae21a3-2d49-4ec1-ba16-7b7f28a54e3b tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Instance disappeared during terminate [ 907.713021] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35ae21a3-2d49-4ec1-ba16-7b7f28a54e3b tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Lock "41a980df-88a9-4f9b-b34b-905b226c0675" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.369s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.008891] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415673, 'name': Rename_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.127503] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-738dc96f-f435-4edc-96c4-5ff76e9d68a6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.135496] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-731e3178-54c9-4366-b565-49861328e5a1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.141791] env[62522]: DEBUG nova.compute.manager [req-8dbce7c9-b8d1-4ae5-b08b-267c07fa3fd9 req-ec41dc1f-9426-417a-974a-5d6b0537a4ca service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Received event network-vif-deleted-e44d8202-0840-41f3-a86d-8baffc8c19dd {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 908.142103] env[62522]: INFO nova.compute.manager [req-8dbce7c9-b8d1-4ae5-b08b-267c07fa3fd9 req-ec41dc1f-9426-417a-974a-5d6b0537a4ca service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Neutron deleted interface e44d8202-0840-41f3-a86d-8baffc8c19dd; detaching it from the instance and deleting it from the info cache [ 908.142207] env[62522]: DEBUG nova.network.neutron [req-8dbce7c9-b8d1-4ae5-b08b-267c07fa3fd9 req-ec41dc1f-9426-417a-974a-5d6b0537a4ca service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.180416] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ef3b2b-1d28-48dd-a57f-e0f1cf0cd1d5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.191378] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb9c27a-31ad-4506-803d-b53174afe786 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.210120] env[62522]: DEBUG nova.compute.provider_tree [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 
tempest-ImagesOneServerTestJSON-1035808317-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.229952] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.508215] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415673, 'name': Rename_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.586448] env[62522]: DEBUG nova.network.neutron [-] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.645642] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eb686123-e8b9-4e7c-9b0f-d0a9d42059fc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.657036] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef11e7c-3810-4d32-9a95-939ab74bb0b6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.693461] env[62522]: DEBUG nova.compute.manager [req-8dbce7c9-b8d1-4ae5-b08b-267c07fa3fd9 req-ec41dc1f-9426-417a-974a-5d6b0537a4ca service nova] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Detach interface failed, port_id=e44d8202-0840-41f3-a86d-8baffc8c19dd, reason: Instance c1fd078c-61d4-4c0f-8c49-0f56a926a087 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 908.713800] env[62522]: DEBUG nova.scheduler.client.report [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 909.008296] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415673, 'name': Rename_Task, 'duration_secs': 1.445708} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.008593] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 909.008870] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-289ce7d7-72ed-4ab3-b2cb-d16a03e37a4a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.015767] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for the task: (returnval){ [ 909.015767] env[62522]: value = "task-2415674" [ 909.015767] env[62522]: _type = "Task" [ 909.015767] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.025859] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415674, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.088795] env[62522]: INFO nova.compute.manager [-] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Took 1.57 seconds to deallocate network for instance. [ 909.219141] env[62522]: DEBUG oslo_concurrency.lockutils [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.074s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.222363] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 40.628s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.223034] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.223034] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62522) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 909.223352] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.975s {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.224925] env[62522]: INFO nova.compute.claims [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 909.228873] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f187cc9-ae46-40f4-8d31-fc258c72381d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.240582] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.240942] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.242502] env[62522]: INFO nova.scheduler.client.report [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Deleted allocations for instance 504396d8-077d-4563-91b5-a7a6259eea27 [ 909.244683] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beb61395-5fe4-43d0-847a-15ff6c92b34b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.262648] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d27a43-38cd-436e-ba76-5517530f8dcf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.269842] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebc9b7d-6b0a-4faa-8fa5-5c0329a00ba4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.301812] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179008MB free_disk=148GB free_vcpus=48 pci_devices=None {{(pid=62522) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 909.301979] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.526163] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 
tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415674, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.595663] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.755021] env[62522]: DEBUG oslo_concurrency.lockutils [None req-210ec8ee-e744-4eed-bdcf-f25ec40c4a57 tempest-ImagesOneServerTestJSON-1035808317 tempest-ImagesOneServerTestJSON-1035808317-project-member] Lock "504396d8-077d-4563-91b5-a7a6259eea27" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.514s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.026204] env[62522]: DEBUG oslo_vmware.api [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415674, 'name': PowerOnVM_Task, 'duration_secs': 0.637884} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.026551] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 910.026799] env[62522]: INFO nova.compute.manager [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Took 9.24 seconds to spawn the instance on the hypervisor. [ 910.027027] env[62522]: DEBUG nova.compute.manager [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 910.027829] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a59cb4-0ec7-44b8-9cda-d9486c08cde2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.164759] env[62522]: DEBUG nova.compute.manager [req-96da6fec-5027-4521-aa8f-cbd6fa90f544 req-2e6f5cb5-e500-420a-9687-7c70a1d7b350 service nova] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Received event network-changed-59c9ae48-dc88-4de9-ba91-f62a004a177c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 910.165065] env[62522]: DEBUG nova.compute.manager [req-96da6fec-5027-4521-aa8f-cbd6fa90f544 req-2e6f5cb5-e500-420a-9687-7c70a1d7b350 service nova] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Refreshing instance network info cache due to event network-changed-59c9ae48-dc88-4de9-ba91-f62a004a177c. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 910.165297] env[62522]: DEBUG oslo_concurrency.lockutils [req-96da6fec-5027-4521-aa8f-cbd6fa90f544 req-2e6f5cb5-e500-420a-9687-7c70a1d7b350 service nova] Acquiring lock "refresh_cache-35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 910.165443] env[62522]: DEBUG oslo_concurrency.lockutils [req-96da6fec-5027-4521-aa8f-cbd6fa90f544 req-2e6f5cb5-e500-420a-9687-7c70a1d7b350 service nova] Acquired lock "refresh_cache-35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.165607] env[62522]: DEBUG nova.network.neutron [req-96da6fec-5027-4521-aa8f-cbd6fa90f544 req-2e6f5cb5-e500-420a-9687-7c70a1d7b350 service nova] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Refreshing network info cache for port 59c9ae48-dc88-4de9-ba91-f62a004a177c {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 910.173257] env[62522]: DEBUG nova.compute.manager [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 910.174755] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21e8c8cf-da15-4f8c-9620-8d637fff85fc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.545237] env[62522]: INFO nova.compute.manager [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Took 61.18 seconds to build instance. 
[ 910.636405] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e89b9120-848f-45f1-8b83-bee2481adc7f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.643762] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe33769a-64eb-41ca-a8b8-a7ccabe1d381 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.677694] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da12017-6cec-411a-ac39-e2cfedae76f8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.685428] env[62522]: INFO nova.compute.manager [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] instance snapshotting [ 910.685744] env[62522]: WARNING nova.compute.manager [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 910.689220] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d474ee3-0f8c-456d-b427-db804977b6e5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.694152] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4fc4553-0d13-4c3f-9389-8c447f897d5b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.706760] env[62522]: DEBUG nova.compute.provider_tree [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.731202] env[62522]: DEBUG nova.scheduler.client.report [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 910.736298] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679e790a-0e60-4e9d-aa66-f8751faa231d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.049925] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7ccc98d1-8bc5-4575-aa07-33884be6e4f8 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock 
"043a0a1b-268c-4caa-b1f7-cc7d70c3b314" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.287s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.240485] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.017s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.241062] env[62522]: DEBUG nova.compute.manager [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 911.243739] env[62522]: DEBUG oslo_concurrency.lockutils [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.381s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.245160] env[62522]: INFO nova.compute.claims [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 911.249209] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Creating Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 911.249493] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3313bd6b-bc17-40e8-b3d9-173b969f1dc9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.260793] env[62522]: DEBUG oslo_vmware.api [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 911.260793] env[62522]: value = "task-2415675" [ 911.260793] env[62522]: _type = "Task" [ 911.260793] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.275791] env[62522]: DEBUG oslo_vmware.api [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415675, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.286618] env[62522]: DEBUG nova.network.neutron [req-96da6fec-5027-4521-aa8f-cbd6fa90f544 req-2e6f5cb5-e500-420a-9687-7c70a1d7b350 service nova] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Updated VIF entry in instance network info cache for port 59c9ae48-dc88-4de9-ba91-f62a004a177c. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 911.287012] env[62522]: DEBUG nova.network.neutron [req-96da6fec-5027-4521-aa8f-cbd6fa90f544 req-2e6f5cb5-e500-420a-9687-7c70a1d7b350 service nova] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Updating instance_info_cache with network_info: [{"id": "59c9ae48-dc88-4de9-ba91-f62a004a177c", "address": "fa:16:3e:49:66:a2", "network": {"id": "fe5f9959-d5b5-47d4-bfa0-689a4f70bf12", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-291790058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.186", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c08a602a8fe4b4396543ac75ac40e7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59c9ae48-dc", "ovs_interfaceid": "59c9ae48-dc88-4de9-ba91-f62a004a177c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.554296] env[62522]: DEBUG nova.compute.manager [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 911.753418] env[62522]: DEBUG nova.compute.utils [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 911.755718] env[62522]: DEBUG nova.compute.manager [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 911.755909] env[62522]: DEBUG nova.network.neutron [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 911.773113] env[62522]: DEBUG oslo_vmware.api [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415675, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.790586] env[62522]: DEBUG oslo_concurrency.lockutils [req-96da6fec-5027-4521-aa8f-cbd6fa90f544 req-2e6f5cb5-e500-420a-9687-7c70a1d7b350 service nova] Releasing lock "refresh_cache-35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 911.857758] env[62522]: DEBUG nova.policy [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4f5cbca1ae3544ab83eeb0eb80606c56', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed393a0454b643eea75c203d1dfd592c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 912.086413] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.260498] env[62522]: DEBUG nova.compute.manager [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 912.276447] env[62522]: DEBUG oslo_vmware.api [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415675, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.555712] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520136de-c5df-2156-59fc-709c28e655b6/disk-0.vmdk. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 912.556898] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1098a4c4-aa9c-473c-ba89-30bd0239cb47 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.565641] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520136de-c5df-2156-59fc-709c28e655b6/disk-0.vmdk is in state: ready. 
{{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 912.565641] env[62522]: ERROR oslo_vmware.rw_handles [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520136de-c5df-2156-59fc-709c28e655b6/disk-0.vmdk due to incomplete transfer. [ 912.566578] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5acc0c92-64b5-4fd1-8fee-101e031eee05 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.576612] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520136de-c5df-2156-59fc-709c28e655b6/disk-0.vmdk. {{(pid=62522) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 912.576840] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Uploaded image aacd6ff3-7a61-4369-ac92-96e059ef8864 to the Glance image server {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 912.580906] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Destroying the VM {{(pid=62522) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 912.581441] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-df061466-1755-4089-a334-7667e453f0f6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.589224] env[62522]: DEBUG nova.network.neutron [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Successfully created port: 7e36641e-fc4a-4223-ab07-33dc49821168 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 912.593081] env[62522]: DEBUG oslo_vmware.api [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 912.593081] env[62522]: value = "task-2415676" [ 912.593081] env[62522]: _type = "Task" [ 912.593081] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.605577] env[62522]: DEBUG oslo_vmware.api [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415676, 'name': Destroy_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.739581] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd5ff43-b233-4de7-9ffa-ba0d1d623239 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.747017] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8ec09be-e0ef-4596-bd18-2f986984e40c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.788735] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c5ebb6b-2eb9-429f-89e4-92ea03ea1e4c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.796811] env[62522]: DEBUG oslo_vmware.api [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415675, 'name': CreateSnapshot_Task, 'duration_secs': 1.016693} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.799207] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Created Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 912.800319] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-008f4383-7fe0-436f-b47d-aa385e50bbb1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.803632] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc71ba6a-0def-43a0-935b-d936fe0a3eaa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.825725] env[62522]: DEBUG nova.compute.provider_tree [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 913.102563] env[62522]: DEBUG oslo_vmware.api [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415676, 'name': Destroy_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.286109] env[62522]: DEBUG nova.compute.manager [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 913.307703] env[62522]: DEBUG nova.virt.hardware [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 913.307956] env[62522]: DEBUG nova.virt.hardware [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 913.308129] env[62522]: DEBUG nova.virt.hardware [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 913.308318] env[62522]: DEBUG nova.virt.hardware [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 913.308462] env[62522]: DEBUG nova.virt.hardware [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 913.308608] env[62522]: DEBUG nova.virt.hardware [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 913.308812] env[62522]: DEBUG nova.virt.hardware [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 913.308966] env[62522]: DEBUG nova.virt.hardware [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 913.309151] env[62522]: DEBUG nova.virt.hardware [None 
req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 913.309318] env[62522]: DEBUG nova.virt.hardware [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 913.309490] env[62522]: DEBUG nova.virt.hardware [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 913.310421] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcfb2cfd-4157-4b22-921c-eb6f96e47039 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.318380] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23eb6b88-b98f-4ac8-adf1-4ee19cd79e94 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.339235] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Creating linked-clone VM from snapshot {{(pid=62522) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 913.340156] env[62522]: DEBUG nova.scheduler.client.report [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 913.345774] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2d1c13be-c135-4e6c-8b5a-683e87dece19 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.352751] env[62522]: DEBUG oslo_vmware.api [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 913.352751] env[62522]: value = "task-2415677" [ 913.352751] env[62522]: _type = "Task" [ 913.352751] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.363298] env[62522]: DEBUG oslo_vmware.api [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415677, 'name': CloneVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.603103] env[62522]: DEBUG oslo_vmware.api [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415676, 'name': Destroy_Task, 'duration_secs': 0.946854} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.603386] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Destroyed the VM [ 913.603626] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Deleting Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 913.603887] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-97fa96a3-c61a-449c-974d-20a857d4b462 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.611063] env[62522]: DEBUG oslo_vmware.api [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 913.611063] env[62522]: value = "task-2415678" [ 913.611063] env[62522]: _type = "Task" [ 913.611063] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.620286] env[62522]: DEBUG oslo_vmware.api [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415678, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.845729] env[62522]: DEBUG oslo_concurrency.lockutils [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.601s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.845729] env[62522]: DEBUG nova.compute.manager [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 913.848304] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 39.331s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.865178] env[62522]: DEBUG oslo_vmware.api [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415677, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.097345] env[62522]: DEBUG nova.compute.manager [req-a54c873e-a9b0-43e5-bd50-0e6710eec0da req-c3071a01-6635-4e98-b784-d52b10f346e8 service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Received event network-vif-plugged-7e36641e-fc4a-4223-ab07-33dc49821168 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 914.097345] env[62522]: DEBUG oslo_concurrency.lockutils [req-a54c873e-a9b0-43e5-bd50-0e6710eec0da req-c3071a01-6635-4e98-b784-d52b10f346e8 service nova] Acquiring lock "04a9d357-d094-487b-8f09-2f7e0c35f0d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.097345] env[62522]: DEBUG oslo_concurrency.lockutils [req-a54c873e-a9b0-43e5-bd50-0e6710eec0da req-c3071a01-6635-4e98-b784-d52b10f346e8 service nova] Lock "04a9d357-d094-487b-8f09-2f7e0c35f0d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.097345] env[62522]: DEBUG oslo_concurrency.lockutils [req-a54c873e-a9b0-43e5-bd50-0e6710eec0da req-c3071a01-6635-4e98-b784-d52b10f346e8 service nova] Lock "04a9d357-d094-487b-8f09-2f7e0c35f0d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.097345] env[62522]: DEBUG nova.compute.manager [req-a54c873e-a9b0-43e5-bd50-0e6710eec0da req-c3071a01-6635-4e98-b784-d52b10f346e8 service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] No waiting events found dispatching network-vif-plugged-7e36641e-fc4a-4223-ab07-33dc49821168 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 914.097345] env[62522]: WARNING nova.compute.manager [req-a54c873e-a9b0-43e5-bd50-0e6710eec0da req-c3071a01-6635-4e98-b784-d52b10f346e8 service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Received unexpected event network-vif-plugged-7e36641e-fc4a-4223-ab07-33dc49821168 for instance with vm_state building and task_state spawning. [ 914.122410] env[62522]: DEBUG oslo_vmware.api [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415678, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.250680] env[62522]: DEBUG nova.network.neutron [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Successfully updated port: 7e36641e-fc4a-4223-ab07-33dc49821168 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 914.302830] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquiring lock "043a0a1b-268c-4caa-b1f7-cc7d70c3b314" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.303109] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock "043a0a1b-268c-4caa-b1f7-cc7d70c3b314" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.303326] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquiring lock "043a0a1b-268c-4caa-b1f7-cc7d70c3b314-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.303508] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock "043a0a1b-268c-4caa-b1f7-cc7d70c3b314-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.303674] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock "043a0a1b-268c-4caa-b1f7-cc7d70c3b314-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.305785] env[62522]: INFO nova.compute.manager [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Terminating instance [ 914.351149] env[62522]: DEBUG nova.compute.utils [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 914.354949] env[62522]: INFO nova.compute.claims [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 
tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 914.358601] env[62522]: DEBUG nova.compute.manager [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Not allocating networking since 'none' was specified. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 914.368661] env[62522]: DEBUG oslo_vmware.api [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415677, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.622145] env[62522]: DEBUG oslo_vmware.api [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415678, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.755707] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquiring lock "refresh_cache-04a9d357-d094-487b-8f09-2f7e0c35f0d7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 914.755880] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquired lock "refresh_cache-04a9d357-d094-487b-8f09-2f7e0c35f0d7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.756022] env[62522]: DEBUG nova.network.neutron [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 914.809711] env[62522]: DEBUG nova.compute.manager [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 914.809940] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 914.810886] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-408ec10b-eb8e-4997-925f-1ce93610d124 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.818682] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 914.818903] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-89da3fde-51b1-4312-bb72-4cfe804db7cf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.825153] env[62522]: DEBUG oslo_vmware.api [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for the task: (returnval){ [ 914.825153] env[62522]: value = "task-2415679" [ 914.825153] env[62522]: _type = "Task" [ 914.825153] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.833280] env[62522]: DEBUG oslo_vmware.api [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415679, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.859090] env[62522]: DEBUG nova.compute.manager [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 914.866278] env[62522]: INFO nova.compute.resource_tracker [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Updating resource usage from migration 8cef14b7-9f7c-4125-955c-e7a909c91b4f [ 914.875148] env[62522]: DEBUG oslo_vmware.api [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415677, 'name': CloneVM_Task} progress is 95%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.121500] env[62522]: DEBUG oslo_vmware.api [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415678, 'name': RemoveSnapshot_Task, 'duration_secs': 1.06633} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.123920] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Deleted Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 915.124179] env[62522]: INFO nova.compute.manager [None req-3f279ef7-5a7c-4dda-a681-f11b79885420 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Took 16.85 seconds to snapshot the instance on the hypervisor. [ 915.268304] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d47b32d-b49c-4065-8b67-791d785f097d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.276440] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0802c86d-09cb-4ded-86ac-45c48cc78264 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.308286] env[62522]: DEBUG nova.network.neutron [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 915.310706] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ee0296-cdef-4dbd-a891-7fe20bc5ab59 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.318064] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072792ab-eaa0-4584-a161-92dead7ff4c9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.331348] env[62522]: DEBUG nova.compute.provider_tree [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 915.343332] env[62522]: DEBUG oslo_vmware.api [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415679, 'name': PowerOffVM_Task, 'duration_secs': 0.215865} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.343332] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 915.343469] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 915.343911] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6022ccc4-3ff1-42da-8483-ae6403c444bd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.376384] env[62522]: DEBUG oslo_vmware.api [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415677, 'name': CloneVM_Task, 'duration_secs': 1.637036} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.376644] env[62522]: INFO nova.virt.vmwareapi.vmops [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Created linked-clone VM from snapshot [ 915.377405] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f08c06e4-b53b-4f22-9153-ff4c0ef006c7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.387779] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Uploading image a0d9a5ba-d51f-44aa-afe5-9fd506bb5ca6 {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 915.415559] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 915.415559] env[62522]: value = "vm-489740" [ 915.415559] env[62522]: _type = "VirtualMachine" [ 915.415559] env[62522]: }. 
{{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 915.416167] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-bd8a8460-0537-445d-9e87-fcda270dc131 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.426790] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lease: (returnval){ [ 915.426790] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a01a1f-e48d-dc6f-9925-652a1d83a3b6" [ 915.426790] env[62522]: _type = "HttpNfcLease" [ 915.426790] env[62522]: } obtained for exporting VM: (result){ [ 915.426790] env[62522]: value = "vm-489740" [ 915.426790] env[62522]: _type = "VirtualMachine" [ 915.426790] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 915.427155] env[62522]: DEBUG oslo_vmware.api [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the lease: (returnval){ [ 915.427155] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a01a1f-e48d-dc6f-9925-652a1d83a3b6" [ 915.427155] env[62522]: _type = "HttpNfcLease" [ 915.427155] env[62522]: } to be ready. {{(pid=62522) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 915.431536] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 915.431841] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 915.432104] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Deleting the datastore file [datastore1] 043a0a1b-268c-4caa-b1f7-cc7d70c3b314 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 915.432932] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aedd78f7-8feb-4c25-89f8-2f62c5967bbd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.436859] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 915.436859] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a01a1f-e48d-dc6f-9925-652a1d83a3b6" [ 915.436859] env[62522]: _type = "HttpNfcLease" [ 915.436859] env[62522]: } is initializing. 
{{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 915.441071] env[62522]: DEBUG oslo_vmware.api [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for the task: (returnval){ [ 915.441071] env[62522]: value = "task-2415682" [ 915.441071] env[62522]: _type = "Task" [ 915.441071] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.448970] env[62522]: DEBUG oslo_vmware.api [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415682, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.512181] env[62522]: DEBUG nova.network.neutron [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Updating instance_info_cache with network_info: [{"id": "7e36641e-fc4a-4223-ab07-33dc49821168", "address": "fa:16:3e:f1:bf:49", "network": {"id": "b837f0fb-c2e1-46dd-93b2-62d6c4352316", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1813744063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed393a0454b643eea75c203d1dfd592c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e36641e-fc", "ovs_interfaceid": "7e36641e-fc4a-4223-ab07-33dc49821168", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.857881] env[62522]: ERROR nova.scheduler.client.report [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [req-de9d623e-3050-4fb0-b174-10c528d097a1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c7fa38b2-245d-4337-a012-22c1a01c0a72. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-de9d623e-3050-4fb0-b174-10c528d097a1"}]} [ 915.873388] env[62522]: DEBUG nova.compute.manager [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 915.876259] env[62522]: DEBUG nova.scheduler.client.report [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Refreshing inventories for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 915.892107] env[62522]: DEBUG nova.scheduler.client.report [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Updating ProviderTree inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 915.892337] env[62522]: DEBUG nova.compute.provider_tree [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 915.900181] env[62522]: DEBUG nova.virt.hardware [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:567}} [ 915.900841] env[62522]: DEBUG nova.virt.hardware [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 915.900841] env[62522]: DEBUG nova.virt.hardware [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 915.900841] env[62522]: DEBUG nova.virt.hardware [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 915.900978] env[62522]: DEBUG nova.virt.hardware [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 915.901026] env[62522]: DEBUG nova.virt.hardware [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 915.901500] env[62522]: DEBUG nova.virt.hardware [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 915.901500] env[62522]: DEBUG nova.virt.hardware [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 915.901598] env[62522]: DEBUG nova.virt.hardware [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 915.901697] env[62522]: DEBUG nova.virt.hardware [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 915.901863] env[62522]: DEBUG nova.virt.hardware [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 915.903023] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d43eab-aa57-4793-b1a4-344ba0169e70 {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.905766] env[62522]: DEBUG nova.scheduler.client.report [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Refreshing aggregate associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, aggregates: None {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 915.913329] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4c72cfa-9adb-4080-909c-ad16691d286a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.927037] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Instance VIF info [] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 915.932498] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Creating folder: Project (4ece8b46eeee460da11d073745fb9e0f). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 915.933385] env[62522]: DEBUG nova.scheduler.client.report [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Refreshing trait associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 915.935315] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6cd58d26-13cc-40f7-a12d-f13b2be8a63e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.945313] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 915.945313] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a01a1f-e48d-dc6f-9925-652a1d83a3b6" [ 915.945313] env[62522]: _type = "HttpNfcLease" [ 915.945313] env[62522]: } is ready. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 915.945895] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 915.945895] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a01a1f-e48d-dc6f-9925-652a1d83a3b6" [ 915.945895] env[62522]: _type = "HttpNfcLease" [ 915.945895] env[62522]: }. 
{{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 915.946638] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff7441e-98d4-4775-b207-de6c13bf647b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.950045] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Created folder: Project (4ece8b46eeee460da11d073745fb9e0f) in parent group-v489562. [ 915.950253] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Creating folder: Instances. Parent ref: group-v489741. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 915.953275] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0d70f74e-26e9-4ce9-baee-51513d3f2418 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.954587] env[62522]: DEBUG oslo_vmware.api [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415682, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134825} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.957423] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 915.957612] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 915.957791] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 915.957994] env[62522]: INFO nova.compute.manager [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Took 1.15 seconds to destroy the instance on the hypervisor. [ 915.958243] env[62522]: DEBUG oslo.service.loopingcall [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 915.958466] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fc815e-cad6-5842-9240-15e912f523fb/disk-0.vmdk from lease info. {{(pid=62522) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 915.958625] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fc815e-cad6-5842-9240-15e912f523fb/disk-0.vmdk for reading. {{(pid=62522) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 915.962155] env[62522]: DEBUG nova.compute.manager [-] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 915.962256] env[62522]: DEBUG nova.network.neutron [-] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 916.017866] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Releasing lock "refresh_cache-04a9d357-d094-487b-8f09-2f7e0c35f0d7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 916.018235] env[62522]: DEBUG nova.compute.manager [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Instance network_info: |[{"id": "7e36641e-fc4a-4223-ab07-33dc49821168", "address": "fa:16:3e:f1:bf:49", "network": {"id": "b837f0fb-c2e1-46dd-93b2-62d6c4352316", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1813744063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed393a0454b643eea75c203d1dfd592c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e36641e-fc", "ovs_interfaceid": "7e36641e-fc4a-4223-ab07-33dc49821168", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 916.023708] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 
04a9d357-d094-487b-8f09-2f7e0c35f0d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:bf:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '09bf081b-cdf0-4977-abe2-2339a87409ab', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e36641e-fc4a-4223-ab07-33dc49821168', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 916.030967] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Creating folder: Project (ed393a0454b643eea75c203d1dfd592c). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 916.031268] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Created folder: Instances in parent group-v489741. [ 916.031483] env[62522]: DEBUG oslo.service.loopingcall [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 916.032849] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bfe16f6f-d797-45ce-bd87-c14e554fb1bf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.034769] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 916.035469] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-73d3e53a-9ce6-44a0-929a-94a50003b9e8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.053929] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 916.053929] env[62522]: value = "task-2415686" [ 916.053929] env[62522]: _type = "Task" [ 916.053929] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.058135] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Created folder: Project (ed393a0454b643eea75c203d1dfd592c) in parent group-v489562. [ 916.058379] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Creating folder: Instances. Parent ref: group-v489743. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 916.059711] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1c060f23-e784-4d5f-b777-906349dc4ffa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.064826] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415686, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.067665] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-bf83a7db-831c-4ce8-a726-2f260f849768 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.074240] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Created folder: Instances in parent group-v489743. [ 916.074461] env[62522]: DEBUG oslo.service.loopingcall [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 916.075195] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 916.075195] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-44637f26-0e19-4659-a0e0-a1162536a96a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.097767] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 916.097767] env[62522]: value = "task-2415688" [ 916.097767] env[62522]: _type = "Task" [ 916.097767] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.115647] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415688, 'name': CreateVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.129156] env[62522]: DEBUG nova.compute.manager [req-802333c3-ecfb-45c8-85b8-374135c7ea33 req-f8d92430-a91c-4fbd-b05e-d9a366405c3e service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Received event network-changed-7e36641e-fc4a-4223-ab07-33dc49821168 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 916.129544] env[62522]: DEBUG nova.compute.manager [req-802333c3-ecfb-45c8-85b8-374135c7ea33 req-f8d92430-a91c-4fbd-b05e-d9a366405c3e service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Refreshing instance network info cache due to event network-changed-7e36641e-fc4a-4223-ab07-33dc49821168. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 916.129544] env[62522]: DEBUG oslo_concurrency.lockutils [req-802333c3-ecfb-45c8-85b8-374135c7ea33 req-f8d92430-a91c-4fbd-b05e-d9a366405c3e service nova] Acquiring lock "refresh_cache-04a9d357-d094-487b-8f09-2f7e0c35f0d7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.129920] env[62522]: DEBUG oslo_concurrency.lockutils [req-802333c3-ecfb-45c8-85b8-374135c7ea33 req-f8d92430-a91c-4fbd-b05e-d9a366405c3e service nova] Acquired lock "refresh_cache-04a9d357-d094-487b-8f09-2f7e0c35f0d7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.129920] env[62522]: DEBUG nova.network.neutron [req-802333c3-ecfb-45c8-85b8-374135c7ea33 req-f8d92430-a91c-4fbd-b05e-d9a366405c3e service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Refreshing network info cache for port 7e36641e-fc4a-4223-ab07-33dc49821168 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 916.490803] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e3402e-3cb8-4d32-85df-810c4ea22a75 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.499032] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b1473e-759d-4238-a239-11cbc77e7354 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.531140] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66950c9d-f19b-49b1-955b-d1e6f829a95d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.539265] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-057c3146-1f15-4ad6-81be-7102a3566ea1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.553155] env[62522]: DEBUG nova.compute.provider_tree [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 916.563936] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415686, 'name': CreateVM_Task, 'duration_secs': 0.474059} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.564933] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 916.565441] env[62522]: DEBUG oslo_concurrency.lockutils [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.565704] env[62522]: DEBUG oslo_concurrency.lockutils [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.566221] env[62522]: DEBUG oslo_concurrency.lockutils [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 916.566819] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8da42a11-d13e-450d-a4e3-f5fd59bb56c7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.571394] env[62522]: DEBUG oslo_vmware.api [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 916.571394] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d6ed0a-f1f5-c013-cb04-821310a7c774" [ 916.571394] env[62522]: _type = "Task" [ 916.571394] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.579431] env[62522]: DEBUG oslo_vmware.api [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d6ed0a-f1f5-c013-cb04-821310a7c774, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.607065] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415688, 'name': CreateVM_Task, 'duration_secs': 0.424436} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.607377] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 916.608253] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.737757] env[62522]: DEBUG nova.compute.manager [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 916.739244] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03878cb7-413b-4251-aa57-78dea3992297 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.804975] env[62522]: DEBUG nova.network.neutron [-] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.020473] env[62522]: DEBUG nova.network.neutron [req-802333c3-ecfb-45c8-85b8-374135c7ea33 req-f8d92430-a91c-4fbd-b05e-d9a366405c3e service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Updated VIF entry in instance network info cache for port 7e36641e-fc4a-4223-ab07-33dc49821168. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 917.020920] env[62522]: DEBUG nova.network.neutron [req-802333c3-ecfb-45c8-85b8-374135c7ea33 req-f8d92430-a91c-4fbd-b05e-d9a366405c3e service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Updating instance_info_cache with network_info: [{"id": "7e36641e-fc4a-4223-ab07-33dc49821168", "address": "fa:16:3e:f1:bf:49", "network": {"id": "b837f0fb-c2e1-46dd-93b2-62d6c4352316", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1813744063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed393a0454b643eea75c203d1dfd592c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e36641e-fc", "ovs_interfaceid": "7e36641e-fc4a-4223-ab07-33dc49821168", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.082154] env[62522]: DEBUG oslo_vmware.api [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d6ed0a-f1f5-c013-cb04-821310a7c774, 'name': SearchDatastore_Task, 'duration_secs': 0.017812} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.082597] env[62522]: DEBUG oslo_concurrency.lockutils [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 917.082926] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 917.083273] env[62522]: DEBUG oslo_concurrency.lockutils [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.083471] env[62522]: DEBUG oslo_concurrency.lockutils [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.083720] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 917.084128] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.084555] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 917.084804] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-31371268-c5d8-426f-bbf3-8e7552702592 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.086801] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7634ba15-a8fe-496c-aad8-fced35d777b1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.092655] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba 
tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 917.092655] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52cf3d78-0a01-5e30-ef92-18e95f663299" [ 917.092655] env[62522]: _type = "Task" [ 917.092655] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.093594] env[62522]: DEBUG nova.scheduler.client.report [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Updated inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with generation 94 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 917.093887] env[62522]: DEBUG nova.compute.provider_tree [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Updating resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 generation from 94 to 95 during operation: update_inventory {{(pid=62522) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 917.094536] env[62522]: DEBUG nova.compute.provider_tree [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 917.101457] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 917.101783] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 917.103107] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9f4ffc9-4cd5-4976-8f7b-7d5df152efb9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.108873] env[62522]: DEBUG oslo_vmware.api [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 917.108873] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525f899c-5cb2-b2f9-960b-db03982b47e5" [ 917.108873] env[62522]: _type = "Task" [ 917.108873] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.112809] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52cf3d78-0a01-5e30-ef92-18e95f663299, 'name': SearchDatastore_Task, 'duration_secs': 0.009296} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.115836] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 917.116161] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 917.116419] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.122858] env[62522]: DEBUG oslo_vmware.api [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525f899c-5cb2-b2f9-960b-db03982b47e5, 'name': SearchDatastore_Task, 'duration_secs': 0.008566} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.123688] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d1f3566-2b9b-4cbd-a6b5-5d565cec0242 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.128954] env[62522]: DEBUG oslo_vmware.api [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 917.128954] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522f105d-49d1-f78f-994c-128f173e2846" [ 917.128954] env[62522]: _type = "Task" [ 917.128954] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.136916] env[62522]: DEBUG oslo_vmware.api [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522f105d-49d1-f78f-994c-128f173e2846, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.251854] env[62522]: INFO nova.compute.manager [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] instance snapshotting [ 917.255632] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d5abd29-006d-4c70-b59d-029afde52d71 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.276181] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2525bba7-6891-4a4c-abec-a0bb5c41c1af {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.307832] env[62522]: INFO nova.compute.manager [-] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Took 1.35 seconds to deallocate network for instance. 
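Aside (illustration, not part of the captured log): the resize claim above is settled against the Placement inventory republished for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 (generation 94 -> 95). Broadly speaking, Placement treats usable capacity per resource class as (total - reserved) * allocation_ratio, with max_unit capping any single allocation; the minimal Python sketch below just applies that to the figures copied from the inventory lines above.

# Minimal sketch; values copied verbatim from the inventory logged above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 149},
}
for rc, inv in inventory.items():
    # usable capacity = (total - reserved) * allocation_ratio
    usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: usable={usable:.0f}, per-allocation cap={inv['max_unit']}")
# VCPU: usable=192, per-allocation cap=16
# MEMORY_MB: usable=196078, per-allocation cap=65530
# DISK_GB: usable=400, per-allocation cap=149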
[ 917.523917] env[62522]: DEBUG oslo_concurrency.lockutils [req-802333c3-ecfb-45c8-85b8-374135c7ea33 req-f8d92430-a91c-4fbd-b05e-d9a366405c3e service nova] Releasing lock "refresh_cache-04a9d357-d094-487b-8f09-2f7e0c35f0d7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 917.604029] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.755s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.604029] env[62522]: INFO nova.compute.manager [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Migrating [ 917.610615] env[62522]: DEBUG oslo_concurrency.lockutils [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.506s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.612460] env[62522]: INFO nova.compute.claims [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 917.640789] env[62522]: DEBUG oslo_vmware.api [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522f105d-49d1-f78f-994c-128f173e2846, 'name': SearchDatastore_Task, 'duration_secs': 0.009127} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.641107] env[62522]: DEBUG oslo_concurrency.lockutils [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 917.641400] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a/ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 917.641691] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.641879] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 917.642113] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8a969d8d-5b54-43d6-abc1-c72144f97434 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.644873] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-88bf7d12-9dd8-4e3b-b4a6-926fca2da782 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.655915] env[62522]: DEBUG oslo_vmware.api [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 917.655915] env[62522]: value = "task-2415689" [ 917.655915] env[62522]: _type = "Task" [ 917.655915] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.655915] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 917.656177] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 917.656861] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d65ec86a-7023-4e7e-864e-4ec65fea92ab {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.666035] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 917.666035] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52548642-4377-a539-7f16-ed9c8f84242c" [ 917.666035] env[62522]: _type = "Task" [ 917.666035] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.669217] env[62522]: DEBUG oslo_vmware.api [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415689, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.677443] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52548642-4377-a539-7f16-ed9c8f84242c, 'name': SearchDatastore_Task, 'duration_secs': 0.008915} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.678192] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e48c951-d0c0-48c0-af3f-359085f153a5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.682995] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 917.682995] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ccfcd9-996e-b8de-52e8-cf334b3eb807" [ 917.682995] env[62522]: _type = "Task" [ 917.682995] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.690604] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ccfcd9-996e-b8de-52e8-cf334b3eb807, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.788715] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Creating Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 917.789240] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d7558db7-52cf-4705-b443-46150029631c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.797265] env[62522]: DEBUG oslo_vmware.api [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 917.797265] env[62522]: value = "task-2415690" [ 917.797265] env[62522]: _type = "Task" [ 917.797265] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.807241] env[62522]: DEBUG oslo_vmware.api [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415690, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.815620] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.123537] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "refresh_cache-74e52638-d284-4bd1-8cff-c7aca9426f75" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.124022] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquired lock "refresh_cache-74e52638-d284-4bd1-8cff-c7aca9426f75" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.124022] env[62522]: DEBUG nova.network.neutron [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 918.167583] env[62522]: DEBUG oslo_vmware.api [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415689, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473927} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.167838] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a/ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 918.168053] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 918.168309] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5de204b4-1e77-4f07-b35b-9c668bfc8e6a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.176479] env[62522]: DEBUG oslo_vmware.api [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 918.176479] env[62522]: value = "task-2415691" [ 918.176479] env[62522]: _type = "Task" [ 918.176479] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.181547] env[62522]: DEBUG nova.compute.manager [req-1330e4a6-7312-44eb-850f-8a8f20c7ebdb req-ecd4a76d-626d-4173-8ab2-26b7dcd9971e service nova] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Received event network-vif-deleted-19506fca-4c28-41e4-b1fb-d6386948229a {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 918.186866] env[62522]: DEBUG oslo_vmware.api [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415691, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.195921] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ccfcd9-996e-b8de-52e8-cf334b3eb807, 'name': SearchDatastore_Task, 'duration_secs': 0.008008} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.196206] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.196466] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 04a9d357-d094-487b-8f09-2f7e0c35f0d7/04a9d357-d094-487b-8f09-2f7e0c35f0d7.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 918.196717] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4ce546d6-40e7-44c8-86e1-6ab5efaa46de {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.202285] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 918.202285] env[62522]: value = "task-2415692" [ 918.202285] env[62522]: _type = "Task" [ 918.202285] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.212577] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415692, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.309113] env[62522]: DEBUG oslo_vmware.api [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415690, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.688845] env[62522]: DEBUG oslo_vmware.api [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415691, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079248} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.691381] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 918.692238] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1350524-8973-45fb-846d-6d9968bf98c7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.718109] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a/ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 918.724110] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9713837e-a68b-4387-a4fc-dbb2ec32c372 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.747912] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415692, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.749583] env[62522]: DEBUG oslo_vmware.api [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 918.749583] env[62522]: value = "task-2415693" [ 918.749583] env[62522]: _type = "Task" [ 918.749583] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.762350] env[62522]: DEBUG oslo_vmware.api [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415693, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.811158] env[62522]: DEBUG oslo_vmware.api [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415690, 'name': CreateSnapshot_Task, 'duration_secs': 0.675614} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.813894] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Created Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 918.814976] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb9f4ea-cccb-40dc-813d-150cc628d1eb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.920761] env[62522]: DEBUG nova.network.neutron [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Updating instance_info_cache with network_info: [{"id": "d830d64b-94fa-4bc8-a3e6-e45c4b0ae629", "address": "fa:16:3e:d4:80:4e", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.185", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd830d64b-94", "ovs_interfaceid": "d830d64b-94fa-4bc8-a3e6-e45c4b0ae629", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.119662] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc8c9496-71a0-4198-a85f-b25c094c1d62 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.128186] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5beab4df-4e2b-474b-9e24-f5cd481c40f3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.160186] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43e230ba-adea-4568-9036-56a83998e30f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.169145] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f15d8e59-3068-42f5-88b1-6a27bc80c472 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.185413] env[62522]: DEBUG nova.compute.provider_tree [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 
tempest-ServerShowV247Test-757790888-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 919.220130] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415692, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.262338] env[62522]: DEBUG oslo_vmware.api [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415693, 'name': ReconfigVM_Task, 'duration_secs': 0.322183} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.262712] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Reconfigured VM instance instance-00000041 to attach disk [datastore1] ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a/ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 919.263738] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0eba50c4-dd59-45e7-b70e-30415e364845 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.275107] env[62522]: DEBUG oslo_vmware.api [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 919.275107] env[62522]: value = "task-2415694" [ 919.275107] env[62522]: _type = "Task" [ 919.275107] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.289871] env[62522]: DEBUG oslo_vmware.api [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415694, 'name': Rename_Task} progress is 6%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.338882] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Creating linked-clone VM from snapshot {{(pid=62522) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 919.339247] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-97f2ee03-3fe6-4083-9eec-f806ca22a068 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.349883] env[62522]: DEBUG oslo_vmware.api [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 919.349883] env[62522]: value = "task-2415695" [ 919.349883] env[62522]: _type = "Task" [ 919.349883] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.360279] env[62522]: DEBUG oslo_vmware.api [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415695, 'name': CloneVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.424218] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Releasing lock "refresh_cache-74e52638-d284-4bd1-8cff-c7aca9426f75" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.720506] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415692, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.38764} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.721473] env[62522]: DEBUG nova.scheduler.client.report [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Updated inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with generation 95 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 919.721717] env[62522]: DEBUG nova.compute.provider_tree [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Updating resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 generation from 95 to 96 during operation: update_inventory {{(pid=62522) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 919.721968] env[62522]: DEBUG nova.compute.provider_tree [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 919.725247] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 04a9d357-d094-487b-8f09-2f7e0c35f0d7/04a9d357-d094-487b-8f09-2f7e0c35f0d7.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 919.725460] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 919.725754] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1d8523a6-efeb-48bc-9e4c-6872d2b85c6b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.734997] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 919.734997] env[62522]: value = "task-2415696" [ 919.734997] env[62522]: _type = "Task" [ 919.734997] env[62522]: } to 
complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.743403] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415696, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.785643] env[62522]: DEBUG oslo_vmware.api [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415694, 'name': Rename_Task, 'duration_secs': 0.277417} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.785909] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 919.786200] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4be69261-979e-48b1-9443-f655bb56dac4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.793685] env[62522]: DEBUG oslo_vmware.api [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 919.793685] env[62522]: value = "task-2415697" [ 919.793685] env[62522]: _type = "Task" [ 919.793685] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.802308] env[62522]: DEBUG oslo_vmware.api [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415697, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.861203] env[62522]: DEBUG oslo_vmware.api [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415695, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.229502] env[62522]: DEBUG oslo_concurrency.lockutils [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.619s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.230281] env[62522]: DEBUG nova.compute.manager [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 920.233314] env[62522]: DEBUG oslo_concurrency.lockutils [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.249s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.235554] env[62522]: INFO nova.compute.claims [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 920.248810] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415696, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081431} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.249268] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 920.250366] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ddc133e-6377-401a-92b5-b8345b81ad20 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.277121] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 04a9d357-d094-487b-8f09-2f7e0c35f0d7/04a9d357-d094-487b-8f09-2f7e0c35f0d7.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 920.278520] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5751ce4-a948-451d-9cb1-c60177c2e228 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.316095] env[62522]: DEBUG oslo_vmware.api [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415697, 'name': PowerOnVM_Task, 'duration_secs': 0.438017} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.317528] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 920.317750] env[62522]: INFO nova.compute.manager [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Took 4.44 seconds to spawn the instance on the hypervisor. [ 920.317979] env[62522]: DEBUG nova.compute.manager [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 920.318526] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 920.318526] env[62522]: value = "task-2415698" [ 920.318526] env[62522]: _type = "Task" [ 920.318526] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.319555] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e59f0c63-a85f-49f5-9066-1dbd70833161 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.340831] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415698, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.364943] env[62522]: DEBUG oslo_vmware.api [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415695, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.739786] env[62522]: DEBUG nova.compute.utils [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 920.743488] env[62522]: DEBUG nova.compute.manager [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Not allocating networking since 'none' was specified. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 920.834638] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415698, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.850094] env[62522]: INFO nova.compute.manager [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Took 47.01 seconds to build instance. [ 920.863448] env[62522]: DEBUG oslo_vmware.api [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415695, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.941291] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea10f118-3af0-47b1-97e4-f1e5cd078d60 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.961881] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Updating instance '74e52638-d284-4bd1-8cff-c7aca9426f75' progress to 0 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 921.244284] env[62522]: DEBUG nova.compute.manager [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 921.340031] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415698, 'name': ReconfigVM_Task, 'duration_secs': 0.643305} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.340031] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 04a9d357-d094-487b-8f09-2f7e0c35f0d7/04a9d357-d094-487b-8f09-2f7e0c35f0d7.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 921.340506] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d4ba7357-2608-452d-8469-aea9326bb671 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.348647] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 921.348647] env[62522]: value = "task-2415699" [ 921.348647] env[62522]: _type = "Task" [ 921.348647] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.352346] env[62522]: DEBUG oslo_concurrency.lockutils [None req-08159d3d-ce8b-450a-9288-cc96f0d27901 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Lock "ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.395s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.364552] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415699, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.372922] env[62522]: DEBUG oslo_vmware.api [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415695, 'name': CloneVM_Task} progress is 95%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.471624] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 921.471932] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-45f87183-5271-4ded-899b-de81ac5ef009 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.480221] env[62522]: DEBUG oslo_vmware.api [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 921.480221] env[62522]: value = "task-2415700" [ 921.480221] env[62522]: _type = "Task" [ 921.480221] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.494927] env[62522]: DEBUG oslo_vmware.api [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415700, 'name': PowerOffVM_Task} progress is 0%. 
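
The lock lines above ("acquired ... waited 40.249s", "released ... held 77.395s") come from oslo.concurrency's lockutils wrapper, which times how long the resource tracker waited for the named "compute_resources" lock and how long it held it while claiming resources. A rough stdlib analogue of that bookkeeping, assuming process-local threading locks (lockutils can also use inter-process file locks):

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}                          # name -> threading.Lock
    _registry_guard = threading.Lock()   # protects the registry itself

    @contextmanager
    def timed_lock(name):
        """Acquire a named lock and report wait/hold times, like lockutils' debug output."""
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
        try:
            yield
        finally:
            held = time.monotonic() - t0 - waited
            lock.release()
            print(f'Lock "{name}" released :: held {held:.3f}s')

    if __name__ == "__main__":
        with timed_lock("compute_resources"):
            time.sleep(0.05)             # stand-in for the instance claim work
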
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.755616] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd97a550-73e3-4de7-9cf5-0cacd8e5f03d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.763989] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e4eeec-6a9c-4d10-81f4-7c609390687e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.798173] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28deb188-688f-44ac-9cd4-31efc542703f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.807838] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7595773-b554-4af1-b347-6618ced4d9be {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.825033] env[62522]: DEBUG nova.compute.provider_tree [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 921.855296] env[62522]: DEBUG nova.compute.manager [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 921.867056] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415699, 'name': Rename_Task, 'duration_secs': 0.262378} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.867734] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 921.868501] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fe3ea1bd-ff89-459e-8398-ca66ada35302 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.875225] env[62522]: DEBUG oslo_vmware.api [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415695, 'name': CloneVM_Task, 'duration_secs': 2.209183} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.875532] env[62522]: INFO nova.virt.vmwareapi.vmops [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Created linked-clone VM from snapshot [ 921.876399] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72db1d9a-232e-4d18-b1e5-b3f48b650e72 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.880751] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 921.880751] env[62522]: value = "task-2415701" [ 921.880751] env[62522]: _type = "Task" [ 921.880751] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.891479] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Uploading image a208a1a2-a7f4-4e1b-ac0b-c961a531fddd {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 921.900594] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415701, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.913640] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 921.913640] env[62522]: value = "vm-489748" [ 921.913640] env[62522]: _type = "VirtualMachine" [ 921.913640] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 921.913937] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-339fe8f6-f433-4b06-9838-a600e0bd07eb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.922931] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lease: (returnval){ [ 921.922931] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52479015-ba01-abdb-e95a-c8806c7c626e" [ 921.922931] env[62522]: _type = "HttpNfcLease" [ 921.922931] env[62522]: } obtained for exporting VM: (result){ [ 921.922931] env[62522]: value = "vm-489748" [ 921.922931] env[62522]: _type = "VirtualMachine" [ 921.922931] env[62522]: }. 
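
Uploading the linked clone as a Glance image starts with an NFC export lease: ExportVm returns an HttpNfcLease, the driver polls it until it leaves "initializing", and only a "ready" lease exposes the VMDK's HTTPS device URL for reading. The sketch below simulates that wait with a local object; a real lease is a vCenter managed object read through the property collector, and the URL shown is a placeholder.

    import itertools
    import time

    class FakeLease:
        """Local stand-in for an HttpNfcLease managed object."""
        def __init__(self):
            self._states = itertools.chain(["initializing"] * 3, itertools.repeat("ready"))

        @property
        def state(self):
            return next(self._states)

        def disk_url(self):
            # stands in for HttpNfcLeaseInfo.deviceUrl[*].url
            return "https://esx-host.example/nfc/<lease-id>/disk-0.vmdk"

    def wait_for_lease_ready(lease, interval=0.05):
        while True:
            state = lease.state
            print(f"Lease is {state}.")
            if state == "ready":
                return
            if state == "error":
                raise RuntimeError("lease failed")
            time.sleep(interval)

    if __name__ == "__main__":
        lease = FakeLease()
        wait_for_lease_ready(lease)
        print("Found VMDK URL:", lease.disk_url())
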
{{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 921.923593] env[62522]: DEBUG oslo_vmware.api [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the lease: (returnval){ [ 921.923593] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52479015-ba01-abdb-e95a-c8806c7c626e" [ 921.923593] env[62522]: _type = "HttpNfcLease" [ 921.923593] env[62522]: } to be ready. {{(pid=62522) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 921.931765] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 921.931765] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52479015-ba01-abdb-e95a-c8806c7c626e" [ 921.931765] env[62522]: _type = "HttpNfcLease" [ 921.931765] env[62522]: } is initializing. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 921.994846] env[62522]: DEBUG oslo_vmware.api [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415700, 'name': PowerOffVM_Task, 'duration_secs': 0.284634} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.995715] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 921.995715] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Updating instance '74e52638-d284-4bd1-8cff-c7aca9426f75' progress to 17 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 922.254912] env[62522]: DEBUG nova.compute.manager [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 922.278516] env[62522]: DEBUG nova.virt.hardware [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 922.278765] env[62522]: DEBUG nova.virt.hardware [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 922.278925] env[62522]: DEBUG nova.virt.hardware [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 922.279127] env[62522]: DEBUG nova.virt.hardware [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 922.279279] env[62522]: DEBUG nova.virt.hardware [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 922.279427] env[62522]: DEBUG nova.virt.hardware [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 922.279644] env[62522]: DEBUG nova.virt.hardware [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 922.279804] env[62522]: DEBUG nova.virt.hardware [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 922.279973] env[62522]: DEBUG nova.virt.hardware [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 
tempest-ServerShowV247Test-757790888-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 922.280222] env[62522]: DEBUG nova.virt.hardware [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 922.280422] env[62522]: DEBUG nova.virt.hardware [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 922.281458] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f064816-6db5-423a-9c31-e7ec8f2a048c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.290897] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b9d0416-a78f-4eec-80ef-1c498d4240f6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.306150] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Instance VIF info [] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 922.312017] env[62522]: DEBUG oslo.service.loopingcall [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
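
The nova.virt.hardware lines above walk through CPU topology selection for the 1-vCPU m1.nano flavor: neither flavor nor image sets limits or preferences, the maxima default to 65536 sockets/cores/threads, and the only factorization of 1 vCPU is 1 socket x 1 core x 1 thread. A simplified analogue of that enumeration (Nova's actual code also orders candidates by flavor and image preferences):

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate (sockets, cores, threads) triples whose product equals vcpus."""
        divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
        found = []
        for sockets, cores in product(divisors, divisors):
            if vcpus % (sockets * cores):
                continue
            threads = vcpus // (sockets * cores)
            if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
                found.append((sockets, cores, threads))
        return found

    print(possible_topologies(1))   # [(1, 1, 1)] -- the single topology seen in the log
    print(possible_topologies(4))   # (1, 1, 4), (1, 2, 2), (2, 2, 1), (4, 1, 1), ...
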
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 922.312326] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 922.312565] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3b6fd6f3-4acf-4ce3-bd08-c2424f7dca69 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.327583] env[62522]: DEBUG nova.scheduler.client.report [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 922.332981] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 922.332981] env[62522]: value = "task-2415703" [ 922.332981] env[62522]: _type = "Task" [ 922.332981] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.342464] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415703, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.386886] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.394514] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415701, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.433620] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 922.433620] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52479015-ba01-abdb-e95a-c8806c7c626e" [ 922.433620] env[62522]: _type = "HttpNfcLease" [ 922.433620] env[62522]: } is ready. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 922.433857] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 922.433857] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52479015-ba01-abdb-e95a-c8806c7c626e" [ 922.433857] env[62522]: _type = "HttpNfcLease" [ 922.433857] env[62522]: }. 
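
The inventory dict in the scheduler report above carries the numbers Placement uses to decide whether a claim fits: per resource class, usable capacity is (total - reserved) * allocation_ratio, and max_unit caps what a single allocation may request. Plugging in the values logged for this provider:

    # Values copied from the inventory logged for provider c7fa38b2-245d-4337-a012-22c1a01c0a72.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 148},
    }

    def usable(inv):
        """Standard Placement capacity formula."""
        return (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]

    for rc, inv in inventory.items():
        print(f"{rc}: usable={usable(inv):.0f}, max per allocation={inv['max_unit']}")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
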
{{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 922.434627] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb7f08a3-df9b-45a6-a36b-7b975090d059 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.445789] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fe1ef7-6923-016a-4acd-d4651b549f6f/disk-0.vmdk from lease info. {{(pid=62522) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 922.445789] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fe1ef7-6923-016a-4acd-d4651b549f6f/disk-0.vmdk for reading. {{(pid=62522) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 922.504515] env[62522]: DEBUG nova.virt.hardware [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:04Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 922.504755] env[62522]: DEBUG nova.virt.hardware [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 922.504899] env[62522]: DEBUG nova.virt.hardware [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 922.505225] env[62522]: DEBUG nova.virt.hardware [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 922.505332] env[62522]: DEBUG nova.virt.hardware [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 922.505811] env[62522]: DEBUG nova.virt.hardware [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 
tempest-MigrationsAdminTest-1135262744-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 922.505986] env[62522]: DEBUG nova.virt.hardware [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 922.506175] env[62522]: DEBUG nova.virt.hardware [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 922.506363] env[62522]: DEBUG nova.virt.hardware [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 922.506514] env[62522]: DEBUG nova.virt.hardware [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 922.506687] env[62522]: DEBUG nova.virt.hardware [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 922.513030] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b69965e-f008-4f88-9ea1-0f5f9c7b8f42 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.533561] env[62522]: DEBUG oslo_vmware.api [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 922.533561] env[62522]: value = "task-2415704" [ 922.533561] env[62522]: _type = "Task" [ 922.533561] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.543370] env[62522]: DEBUG oslo_vmware.api [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415704, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.580531] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2a0d6167-bc0d-4871-9763-47f342871a85 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.834266] env[62522]: DEBUG oslo_concurrency.lockutils [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.601s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.834869] env[62522]: DEBUG nova.compute.manager [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 922.840486] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.332s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.841980] env[62522]: INFO nova.compute.claims [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 922.858133] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415703, 'name': CreateVM_Task, 'duration_secs': 0.382329} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.859031] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 922.859031] env[62522]: DEBUG oslo_concurrency.lockutils [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 922.859031] env[62522]: DEBUG oslo_concurrency.lockutils [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.859290] env[62522]: DEBUG oslo_concurrency.lockutils [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 922.859550] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-780dcbd0-3c7d-4796-a752-33613702bb6c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.868812] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 922.868812] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5225b53b-951c-2888-388f-a13a484e2957" [ 922.868812] env[62522]: _type = "Task" [ 922.868812] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.879251] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5225b53b-951c-2888-388f-a13a484e2957, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.892859] env[62522]: DEBUG oslo_vmware.api [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415701, 'name': PowerOnVM_Task, 'duration_secs': 0.559781} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.893222] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 922.893447] env[62522]: INFO nova.compute.manager [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Took 9.61 seconds to spawn the instance on the hypervisor. [ 922.893677] env[62522]: DEBUG nova.compute.manager [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 922.894588] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8480d1f5-830a-432a-86cf-b43a16bb5e4f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.044293] env[62522]: DEBUG oslo_vmware.api [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415704, 'name': ReconfigVM_Task, 'duration_secs': 0.238644} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.044746] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Updating instance '74e52638-d284-4bd1-8cff-c7aca9426f75' progress to 33 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 923.342603] env[62522]: DEBUG nova.compute.utils [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 923.344354] env[62522]: DEBUG nova.compute.manager [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 923.344671] env[62522]: DEBUG nova.network.neutron [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 923.381184] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5225b53b-951c-2888-388f-a13a484e2957, 'name': SearchDatastore_Task, 'duration_secs': 0.016475} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.381651] env[62522]: DEBUG oslo_concurrency.lockutils [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 923.381919] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 923.382257] env[62522]: DEBUG oslo_concurrency.lockutils [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 923.382672] env[62522]: DEBUG oslo_concurrency.lockutils [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.382672] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 923.382983] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eae69269-3473-4260-b98b-186ff4d3a770 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.394332] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 923.394576] env[62522]: DEBUG 
nova.virt.vmwareapi.vmops [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 923.395531] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2f162b8-6828-4a6c-b7ad-f2db84ef4d9f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.404224] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 923.404224] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52024381-be41-baf3-ea7e-13dfd3051b22" [ 923.404224] env[62522]: _type = "Task" [ 923.404224] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.414837] env[62522]: DEBUG nova.policy [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e959d2cd75d94a38b0d6a7b93f74f819', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '13a5a5169d8345a7a88fef5ff0ecd26e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 923.420766] env[62522]: INFO nova.compute.manager [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Took 53.19 seconds to build instance. [ 923.428252] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52024381-be41-baf3-ea7e-13dfd3051b22, 'name': SearchDatastore_Task, 'duration_secs': 0.011826} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.429587] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9dffc71-cb7d-4941-831b-1751244d71c9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.436343] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 923.436343] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526171d8-2ca6-9824-a0ad-6e233ef20a0c" [ 923.436343] env[62522]: _type = "Task" [ 923.436343] env[62522]: } to complete. 
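
The nova.policy entry above records the network:attach_external_network rule being evaluated against the request context and failing for a plain project member with roles ['reader', 'member']. A much-simplified role-based stand-in for that check (real rules are oslo.policy expressions such as "rule:context_is_admin" evaluated by an Enforcer; the rule content below is an assumption for the sketch):

    # assumption for the sketch: attaching to an external network is admin-only
    POLICIES = {
        "network:attach_external_network": {"admin"},
    }

    def check(action, credentials):
        allowed = POLICIES.get(action, set())
        ok = bool(allowed & set(credentials.get("roles", []))) or credentials.get("is_admin", False)
        print(f"Policy check for {action} {'passed' if ok else 'failed'} "
              f"with roles {credentials.get('roles')}")
        return ok

    creds = {"is_admin": False, "roles": ["reader", "member"]}
    check("network:attach_external_network", creds)   # fails, as in the log entry
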
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.446983] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526171d8-2ca6-9824-a0ad-6e233ef20a0c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.551265] env[62522]: DEBUG nova.virt.hardware [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:21:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1bf21d87-4ee8-4637-a3ba-85267d79b549',id=39,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1714837760',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 923.552196] env[62522]: DEBUG nova.virt.hardware [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 923.552196] env[62522]: DEBUG nova.virt.hardware [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 923.552347] env[62522]: DEBUG nova.virt.hardware [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 923.552477] env[62522]: DEBUG nova.virt.hardware [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 923.552700] env[62522]: DEBUG nova.virt.hardware [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 923.552984] env[62522]: DEBUG nova.virt.hardware [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 923.553294] env[62522]: DEBUG nova.virt.hardware [None 
req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 923.554013] env[62522]: DEBUG nova.virt.hardware [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 923.554013] env[62522]: DEBUG nova.virt.hardware [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 923.554013] env[62522]: DEBUG nova.virt.hardware [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 923.559373] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Reconfiguring VM instance instance-00000037 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 923.559706] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ecfd42b4-c20f-4ccf-b057-09c1e5a5a8dc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.585442] env[62522]: DEBUG oslo_vmware.api [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 923.585442] env[62522]: value = "task-2415705" [ 923.585442] env[62522]: _type = "Task" [ 923.585442] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.600628] env[62522]: DEBUG oslo_vmware.api [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415705, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.767441] env[62522]: DEBUG nova.network.neutron [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Successfully created port: 808a9620-b31f-4e61-bb51-e2de4a3d3a7e {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 923.848428] env[62522]: DEBUG nova.compute.manager [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 923.924626] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ed367cd1-da4d-4cb2-8fee-48608eb464ba tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "04a9d357-d094-487b-8f09-2f7e0c35f0d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.118s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.949211] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526171d8-2ca6-9824-a0ad-6e233ef20a0c, 'name': SearchDatastore_Task, 'duration_secs': 0.017449} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.949648] env[62522]: DEBUG oslo_concurrency.lockutils [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 923.949781] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a/72e054d2-79bb-4ef8-82d1-4e67ba0ef20a.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 923.949986] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2acd7038-613a-4248-8107-1002818f35c3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.958733] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 923.958733] env[62522]: value = "task-2415706" [ 923.958733] env[62522]: _type = "Task" [ 923.958733] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.971832] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415706, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.096333] env[62522]: DEBUG oslo_vmware.api [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415705, 'name': ReconfigVM_Task, 'duration_secs': 0.282126} completed successfully. 
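
The sequence above is the VMware driver's image-cache path for instance 72e054d2: take the lock on the cached 2ee4561b-ba48-4f45-82f6-eac89be98290 entry under devstack-image-cache_base, run SearchDatastore_Task to confirm the cached VMDK exists, then let CopyVirtualDisk_Task clone it into the new instance's folder. A local-filesystem analogue of that check-then-copy-under-a-lock flow; the paths and the fetch_image callable are placeholders, not the driver's real datastore operations.

    import shutil
    import threading
    from pathlib import Path

    _cache_lock = threading.Lock()   # stands in for the lockutils lock on the cache entry

    def ensure_root_disk(image_id, instance_uuid, cache_dir, instances_dir, fetch_image):
        """Copy the cached base image into the instance directory, fetching it on a cache miss."""
        cached = Path(cache_dir) / image_id / f"{image_id}.vmdk"
        target = Path(instances_dir) / instance_uuid / f"{instance_uuid}.vmdk"
        with _cache_lock:                       # serialize access to the cache entry
            if not cached.exists():             # SearchDatastore_Task found nothing
                cached.parent.mkdir(parents=True, exist_ok=True)
                fetch_image(image_id, cached)   # download from Glance (placeholder callable)
            target.parent.mkdir(parents=True, exist_ok=True)
            shutil.copyfile(cached, target)     # CopyVirtualDisk_Task in the real driver
        return target

    if __name__ == "__main__":
        disk = ensure_root_disk(
            "2ee4561b-ba48-4f45-82f6-eac89be98290", "demo-instance",
            "/tmp/image-cache", "/tmp/instances",
            fetch_image=lambda image_id, dest: dest.write_bytes(b"fake vmdk"),
        )
        print("root disk at", disk)
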
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.096810] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Reconfigured VM instance instance-00000037 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 924.097781] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a6a833-fe9b-4f85-b1d7-3d103b951319 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.125841] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] 74e52638-d284-4bd1-8cff-c7aca9426f75/74e52638-d284-4bd1-8cff-c7aca9426f75.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 924.129076] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67f7d6ba-ebac-43f1-a8d0-284e7126a0dd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.149447] env[62522]: DEBUG oslo_vmware.api [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 924.149447] env[62522]: value = "task-2415707" [ 924.149447] env[62522]: _type = "Task" [ 924.149447] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.163446] env[62522]: DEBUG oslo_vmware.api [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415707, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.234234] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fc815e-cad6-5842-9240-15e912f523fb/disk-0.vmdk. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 924.235425] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb4aa930-82ef-4fce-91ec-0070a95c4f3c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.242749] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fc815e-cad6-5842-9240-15e912f523fb/disk-0.vmdk is in state: ready. 
{{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 924.242928] env[62522]: ERROR oslo_vmware.rw_handles [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fc815e-cad6-5842-9240-15e912f523fb/disk-0.vmdk due to incomplete transfer. [ 924.243205] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d6cf2ca5-dd00-4655-bf95-8b06948b156e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.252440] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fc815e-cad6-5842-9240-15e912f523fb/disk-0.vmdk. {{(pid=62522) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 924.252644] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Uploaded image a0d9a5ba-d51f-44aa-afe5-9fd506bb5ca6 to the Glance image server {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 924.254824] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Destroying the VM {{(pid=62522) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 924.255124] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0c1f5e78-003d-4978-b580-bedb2f69b3c4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.265503] env[62522]: DEBUG oslo_vmware.api [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 924.265503] env[62522]: value = "task-2415708" [ 924.265503] env[62522]: _type = "Task" [ 924.265503] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.276372] env[62522]: DEBUG oslo_vmware.api [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415708, 'name': Destroy_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.330571] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a79f050c-37b8-4850-8694-d62fe74e12ae {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.338881] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49090c01-cdc7-4866-9873-963fdec4af53 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.380360] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd8d22b8-25d0-44fd-a3e2-832311669635 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.390987] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b911e1-d31e-4f49-97e1-890b55535e7a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.408031] env[62522]: DEBUG nova.compute.provider_tree [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 924.473108] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415706, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.661665] env[62522]: DEBUG oslo_vmware.api [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415707, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.777111] env[62522]: DEBUG oslo_vmware.api [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415708, 'name': Destroy_Task} progress is 33%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.887750] env[62522]: DEBUG nova.compute.manager [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 924.911875] env[62522]: DEBUG nova.scheduler.client.report [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 924.925309] env[62522]: DEBUG nova.virt.hardware [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 924.925585] env[62522]: DEBUG nova.virt.hardware [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 924.925743] env[62522]: DEBUG nova.virt.hardware [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 924.925925] env[62522]: DEBUG nova.virt.hardware [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 924.926280] env[62522]: DEBUG nova.virt.hardware [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 924.926518] env[62522]: DEBUG nova.virt.hardware [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 924.926775] env[62522]: DEBUG nova.virt.hardware [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 924.926978] env[62522]: DEBUG nova.virt.hardware [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 924.927218] env[62522]: DEBUG nova.virt.hardware [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 924.927438] env[62522]: DEBUG nova.virt.hardware [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 924.927672] env[62522]: DEBUG nova.virt.hardware [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 924.928845] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4959d5b-78a7-4410-9473-6e38b3276f41 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.940023] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9c78b7-9d6b-4310-8b83-eb3ac5ff2fe7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.975429] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415706, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.162636] env[62522]: DEBUG oslo_vmware.api [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415707, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.276878] env[62522]: DEBUG oslo_vmware.api [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415708, 'name': Destroy_Task, 'duration_secs': 0.588026} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.277591] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Destroyed the VM [ 925.277591] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Deleting Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 925.277709] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-865c5a6b-3d4c-471e-bc64-42720647c647 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.286613] env[62522]: DEBUG oslo_vmware.api [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 925.286613] env[62522]: value = "task-2415709" [ 925.286613] env[62522]: _type = "Task" [ 925.286613] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.296635] env[62522]: DEBUG oslo_vmware.api [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415709, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.418792] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.577s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.418792] env[62522]: DEBUG nova.compute.manager [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 925.421380] env[62522]: DEBUG oslo_concurrency.lockutils [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.533s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.422998] env[62522]: INFO nova.compute.claims [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 925.476130] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415706, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.321406} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.477675] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a/72e054d2-79bb-4ef8-82d1-4e67ba0ef20a.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 925.477939] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 925.478481] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dcfcc801-f022-4259-9efe-0f60e6dfc58e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.488823] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 925.488823] env[62522]: value = "task-2415710" [ 925.488823] env[62522]: _type = "Task" [ 925.488823] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.500192] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415710, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.524563] env[62522]: DEBUG nova.network.neutron [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Successfully updated port: 808a9620-b31f-4e61-bb51-e2de4a3d3a7e {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 925.625485] env[62522]: DEBUG nova.compute.manager [req-1379f764-0546-4454-8a27-de1b1cf418f6 req-dba45d21-48b1-41d7-858f-32a92e02bb20 service nova] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Received event network-vif-plugged-808a9620-b31f-4e61-bb51-e2de4a3d3a7e {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 925.625607] env[62522]: DEBUG oslo_concurrency.lockutils [req-1379f764-0546-4454-8a27-de1b1cf418f6 req-dba45d21-48b1-41d7-858f-32a92e02bb20 service nova] Acquiring lock "8b21b749-b872-43f7-a2c5-aefee6c5f3a1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.625821] env[62522]: DEBUG oslo_concurrency.lockutils [req-1379f764-0546-4454-8a27-de1b1cf418f6 req-dba45d21-48b1-41d7-858f-32a92e02bb20 service nova] Lock "8b21b749-b872-43f7-a2c5-aefee6c5f3a1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.626036] env[62522]: DEBUG oslo_concurrency.lockutils [req-1379f764-0546-4454-8a27-de1b1cf418f6 req-dba45d21-48b1-41d7-858f-32a92e02bb20 service nova] Lock "8b21b749-b872-43f7-a2c5-aefee6c5f3a1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.626197] env[62522]: DEBUG nova.compute.manager [req-1379f764-0546-4454-8a27-de1b1cf418f6 req-dba45d21-48b1-41d7-858f-32a92e02bb20 service nova] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] No waiting events found dispatching network-vif-plugged-808a9620-b31f-4e61-bb51-e2de4a3d3a7e {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 925.626378] env[62522]: WARNING nova.compute.manager [req-1379f764-0546-4454-8a27-de1b1cf418f6 req-dba45d21-48b1-41d7-858f-32a92e02bb20 service nova] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Received unexpected event network-vif-plugged-808a9620-b31f-4e61-bb51-e2de4a3d3a7e for instance with vm_state building and task_state spawning. [ 925.662542] env[62522]: DEBUG oslo_vmware.api [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415707, 'name': ReconfigVM_Task, 'duration_secs': 1.154804} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.662666] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Reconfigured VM instance instance-00000037 to attach disk [datastore2] 74e52638-d284-4bd1-8cff-c7aca9426f75/74e52638-d284-4bd1-8cff-c7aca9426f75.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 925.662895] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Updating instance '74e52638-d284-4bd1-8cff-c7aca9426f75' progress to 50 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 925.797856] env[62522]: DEBUG oslo_vmware.api [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415709, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.928935] env[62522]: DEBUG nova.compute.utils [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 925.932066] env[62522]: DEBUG nova.compute.manager [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 925.932257] env[62522]: DEBUG nova.network.neutron [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 925.999570] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415710, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077048} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.000407] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 926.000934] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c80fb869-0a29-46fa-a7d0-7af8a1d7bdf4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.022864] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a/72e054d2-79bb-4ef8-82d1-4e67ba0ef20a.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 926.024550] env[62522]: DEBUG nova.policy [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '35ad126a48114240927e0ffaf217e35d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72db051c023d4293b22a8a58a517374c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 926.026135] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9efc9bbb-1048-4c2f-86f2-f45c5a315474 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.041886] env[62522]: DEBUG oslo_concurrency.lockutils [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquiring lock "refresh_cache-8b21b749-b872-43f7-a2c5-aefee6c5f3a1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.042070] env[62522]: DEBUG oslo_concurrency.lockutils [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquired lock "refresh_cache-8b21b749-b872-43f7-a2c5-aefee6c5f3a1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.042294] env[62522]: DEBUG nova.network.neutron [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 926.050755] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c 
tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 926.050755] env[62522]: value = "task-2415711" [ 926.050755] env[62522]: _type = "Task" [ 926.050755] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.062479] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415711, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.169655] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a38c116-4a59-4896-bbf1-6f7e7ef131fc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.196255] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquiring lock "7e5fc552-748f-4569-bd61-c81a52bb46b0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.196536] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "7e5fc552-748f-4569-bd61-c81a52bb46b0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.198645] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4266d53-458a-4d62-8978-0a81ab00d52c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.219381] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Updating instance '74e52638-d284-4bd1-8cff-c7aca9426f75' progress to 67 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 926.298195] env[62522]: DEBUG oslo_vmware.api [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415709, 'name': RemoveSnapshot_Task, 'duration_secs': 0.645979} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.298488] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Deleted Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 926.298720] env[62522]: INFO nova.compute.manager [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Took 15.61 seconds to snapshot the instance on the hypervisor. [ 926.302753] env[62522]: DEBUG oslo_concurrency.lockutils [None req-01dccc48-c48f-4870-8972-b57f018d667a tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "845f99b8-4a9d-4fbe-89e1-825a5ddd01f2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.302971] env[62522]: DEBUG oslo_concurrency.lockutils [None req-01dccc48-c48f-4870-8972-b57f018d667a tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "845f99b8-4a9d-4fbe-89e1-825a5ddd01f2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.303193] env[62522]: DEBUG oslo_concurrency.lockutils [None req-01dccc48-c48f-4870-8972-b57f018d667a tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "845f99b8-4a9d-4fbe-89e1-825a5ddd01f2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.303379] env[62522]: DEBUG oslo_concurrency.lockutils [None req-01dccc48-c48f-4870-8972-b57f018d667a tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "845f99b8-4a9d-4fbe-89e1-825a5ddd01f2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.303544] env[62522]: DEBUG oslo_concurrency.lockutils [None req-01dccc48-c48f-4870-8972-b57f018d667a tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "845f99b8-4a9d-4fbe-89e1-825a5ddd01f2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.305401] env[62522]: INFO nova.compute.manager [None req-01dccc48-c48f-4870-8972-b57f018d667a tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Terminating instance [ 926.433529] env[62522]: DEBUG nova.compute.manager [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 926.559884] env[62522]: DEBUG nova.network.neutron [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Successfully created port: 50a9aa3e-b35e-4feb-b010-5e72dd8c2252 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 926.569111] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415711, 'name': ReconfigVM_Task, 'duration_secs': 0.398875} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.569433] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a/72e054d2-79bb-4ef8-82d1-4e67ba0ef20a.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 926.570078] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-49f8d43d-b1e9-4fb2-84ef-1857981004eb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.581064] env[62522]: DEBUG nova.network.neutron [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 926.584796] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 926.584796] env[62522]: value = "task-2415712" [ 926.584796] env[62522]: _type = "Task" [ 926.584796] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.598715] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415712, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.702333] env[62522]: DEBUG nova.compute.manager [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 926.760907] env[62522]: DEBUG nova.network.neutron [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Updating instance_info_cache with network_info: [{"id": "808a9620-b31f-4e61-bb51-e2de4a3d3a7e", "address": "fa:16:3e:e1:dd:f7", "network": {"id": "be69fd15-aa3c-4e6e-9334-57674f1f2d81", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-543389568-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13a5a5169d8345a7a88fef5ff0ecd26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap808a9620-b3", "ovs_interfaceid": "808a9620-b31f-4e61-bb51-e2de4a3d3a7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.786236] env[62522]: DEBUG nova.network.neutron [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Port d830d64b-94fa-4bc8-a3e6-e45c4b0ae629 binding to destination host cpu-1 is already ACTIVE {{(pid=62522) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 926.802060] env[62522]: DEBUG nova.compute.manager [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Instance disappeared during snapshot {{(pid=62522) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 926.809570] env[62522]: DEBUG nova.compute.manager [None req-01dccc48-c48f-4870-8972-b57f018d667a tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 926.809570] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-01dccc48-c48f-4870-8972-b57f018d667a tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 926.810365] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34e36357-7441-4a8c-a14c-605b308f4680 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.816612] env[62522]: DEBUG nova.compute.manager [None req-a2f8ede6-fef6-4970-ad78-31e748aef7ff tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Image not found during clean up a0d9a5ba-d51f-44aa-afe5-9fd506bb5ca6 {{(pid=62522) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 926.827613] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-01dccc48-c48f-4870-8972-b57f018d667a tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 926.827902] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-08a66ca4-34a4-4fb2-8943-6e3f7081ebaa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.900396] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-01dccc48-c48f-4870-8972-b57f018d667a tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 926.900661] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-01dccc48-c48f-4870-8972-b57f018d667a tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 926.900931] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-01dccc48-c48f-4870-8972-b57f018d667a tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Deleting the datastore file [datastore2] 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 926.901303] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-99a3127e-ca3d-4869-8faa-64e9f03f15fc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.909272] env[62522]: DEBUG oslo_vmware.api [None req-01dccc48-c48f-4870-8972-b57f018d667a tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 926.909272] env[62522]: value = "task-2415714" [ 926.909272] env[62522]: _type = "Task" [ 926.909272] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.922911] env[62522]: DEBUG oslo_vmware.api [None req-01dccc48-c48f-4870-8972-b57f018d667a tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415714, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.985837] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-644f7590-9a88-4ad4-b0bd-8736a574b957 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.997580] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b9c0853-30dc-4164-a82b-c6dd7c9af497 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.033638] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c83b5a-a464-4b29-aef3-bc896ccc1ae4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.043425] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4bd1c63-0b21-4a1f-b7e2-6eb6d1f7e3ad {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.058717] env[62522]: DEBUG nova.compute.provider_tree [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 927.067114] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "e1225c6f-9025-41ff-94fa-a55af49aeed2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.067354] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "e1225c6f-9025-41ff-94fa-a55af49aeed2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.097430] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415712, 'name': Rename_Task, 'duration_secs': 0.173617} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.097793] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 927.098094] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d36d3acf-12ea-4885-a0c0-0c417cbe0dea {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.106937] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 927.106937] env[62522]: value = "task-2415715" [ 927.106937] env[62522]: _type = "Task" [ 927.106937] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.115689] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415715, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.225249] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.264293] env[62522]: DEBUG oslo_concurrency.lockutils [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Releasing lock "refresh_cache-8b21b749-b872-43f7-a2c5-aefee6c5f3a1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.264654] env[62522]: DEBUG nova.compute.manager [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Instance network_info: |[{"id": "808a9620-b31f-4e61-bb51-e2de4a3d3a7e", "address": "fa:16:3e:e1:dd:f7", "network": {"id": "be69fd15-aa3c-4e6e-9334-57674f1f2d81", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-543389568-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13a5a5169d8345a7a88fef5ff0ecd26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap808a9620-b3", "ovs_interfaceid": "808a9620-b31f-4e61-bb51-e2de4a3d3a7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 927.265193] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:dd:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abe48956-848a-4e1f-b1f1-a27baa5390b9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '808a9620-b31f-4e61-bb51-e2de4a3d3a7e', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 927.273089] env[62522]: DEBUG oslo.service.loopingcall [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 927.273250] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 927.273491] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3540742a-d243-4ab6-8e06-6cf0bbce87d3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.299732] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 927.299732] env[62522]: value = "task-2415716" [ 927.299732] env[62522]: _type = "Task" [ 927.299732] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.308917] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415716, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.423504] env[62522]: DEBUG oslo_vmware.api [None req-01dccc48-c48f-4870-8972-b57f018d667a tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415714, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.332087} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.423677] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-01dccc48-c48f-4870-8972-b57f018d667a tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 927.423865] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-01dccc48-c48f-4870-8972-b57f018d667a tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 927.424163] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-01dccc48-c48f-4870-8972-b57f018d667a tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 927.424305] env[62522]: INFO nova.compute.manager [None req-01dccc48-c48f-4870-8972-b57f018d667a tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Took 0.62 seconds to destroy the instance on the hypervisor. [ 927.424616] env[62522]: DEBUG oslo.service.loopingcall [None req-01dccc48-c48f-4870-8972-b57f018d667a tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 927.424864] env[62522]: DEBUG nova.compute.manager [-] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 927.424976] env[62522]: DEBUG nova.network.neutron [-] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 927.445567] env[62522]: DEBUG nova.compute.manager [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 927.471555] env[62522]: DEBUG nova.virt.hardware [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 927.471799] env[62522]: DEBUG nova.virt.hardware [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 927.471959] env[62522]: DEBUG nova.virt.hardware [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 927.472164] env[62522]: DEBUG nova.virt.hardware [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 927.472431] env[62522]: DEBUG nova.virt.hardware [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 927.472620] env[62522]: DEBUG nova.virt.hardware [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 927.472881] env[62522]: DEBUG nova.virt.hardware [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 927.472995] env[62522]: DEBUG nova.virt.hardware [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 927.473183] env[62522]: DEBUG nova.virt.hardware [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 927.473353] env[62522]: DEBUG nova.virt.hardware [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 927.473527] env[62522]: DEBUG nova.virt.hardware [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 927.474814] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35344dfc-3bff-4d0b-8e76-4db55cce4320 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.483567] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60771af9-f3f4-42dd-a685-d0d751f2ea0f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.564805] env[62522]: DEBUG nova.scheduler.client.report [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 927.619115] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415715, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.662308] env[62522]: DEBUG nova.compute.manager [req-c741b858-b896-4829-af34-eead74e1a031 req-5af693b1-c5e2-40c2-970e-e811e6a96d0f service nova] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Received event network-changed-808a9620-b31f-4e61-bb51-e2de4a3d3a7e {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 927.662542] env[62522]: DEBUG nova.compute.manager [req-c741b858-b896-4829-af34-eead74e1a031 req-5af693b1-c5e2-40c2-970e-e811e6a96d0f service nova] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Refreshing instance network info cache due to event network-changed-808a9620-b31f-4e61-bb51-e2de4a3d3a7e. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 927.662828] env[62522]: DEBUG oslo_concurrency.lockutils [req-c741b858-b896-4829-af34-eead74e1a031 req-5af693b1-c5e2-40c2-970e-e811e6a96d0f service nova] Acquiring lock "refresh_cache-8b21b749-b872-43f7-a2c5-aefee6c5f3a1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.662988] env[62522]: DEBUG oslo_concurrency.lockutils [req-c741b858-b896-4829-af34-eead74e1a031 req-5af693b1-c5e2-40c2-970e-e811e6a96d0f service nova] Acquired lock "refresh_cache-8b21b749-b872-43f7-a2c5-aefee6c5f3a1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.663193] env[62522]: DEBUG nova.network.neutron [req-c741b858-b896-4829-af34-eead74e1a031 req-5af693b1-c5e2-40c2-970e-e811e6a96d0f service nova] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Refreshing network info cache for port 808a9620-b31f-4e61-bb51-e2de4a3d3a7e {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 927.769456] env[62522]: DEBUG nova.compute.manager [req-c6520c55-b78e-4a64-b6df-62023960ea6c req-eb3991a2-0264-46ad-9568-7c952d496e32 service nova] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Received event network-vif-deleted-cecb41e7-0c40-40fd-b130-fc0afe3fba0d {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 927.769686] env[62522]: INFO nova.compute.manager [req-c6520c55-b78e-4a64-b6df-62023960ea6c req-eb3991a2-0264-46ad-9568-7c952d496e32 service nova] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Neutron deleted interface cecb41e7-0c40-40fd-b130-fc0afe3fba0d; detaching it from the instance and deleting it from the info cache [ 927.769855] env[62522]: DEBUG nova.network.neutron [req-c6520c55-b78e-4a64-b6df-62023960ea6c req-eb3991a2-0264-46ad-9568-7c952d496e32 service nova] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.820006] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "74e52638-d284-4bd1-8cff-c7aca9426f75-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.820754] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "74e52638-d284-4bd1-8cff-c7aca9426f75-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.820754] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "74e52638-d284-4bd1-8cff-c7aca9426f75-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.829252] env[62522]: DEBUG 
oslo_vmware.api [-] Task: {'id': task-2415716, 'name': CreateVM_Task} progress is 25%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.069890] env[62522]: DEBUG oslo_concurrency.lockutils [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.648s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.070515] env[62522]: DEBUG nova.compute.manager [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 928.081026] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.363s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.081026] env[62522]: INFO nova.compute.claims [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 928.123784] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415715, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.213692] env[62522]: DEBUG nova.network.neutron [-] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.271937] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-03d9b0dc-5e2f-4641-a08c-c1793186f98b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.284499] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b153b9-44a8-43c3-9c15-b9a522958318 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.326014] env[62522]: DEBUG nova.compute.manager [req-c6520c55-b78e-4a64-b6df-62023960ea6c req-eb3991a2-0264-46ad-9568-7c952d496e32 service nova] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Detach interface failed, port_id=cecb41e7-0c40-40fd-b130-fc0afe3fba0d, reason: Instance 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 928.335756] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415716, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.369895] env[62522]: DEBUG nova.network.neutron [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Successfully updated port: 50a9aa3e-b35e-4feb-b010-5e72dd8c2252 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 928.380886] env[62522]: DEBUG nova.network.neutron [req-c741b858-b896-4829-af34-eead74e1a031 req-5af693b1-c5e2-40c2-970e-e811e6a96d0f service nova] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Updated VIF entry in instance network info cache for port 808a9620-b31f-4e61-bb51-e2de4a3d3a7e. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 928.380886] env[62522]: DEBUG nova.network.neutron [req-c741b858-b896-4829-af34-eead74e1a031 req-5af693b1-c5e2-40c2-970e-e811e6a96d0f service nova] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Updating instance_info_cache with network_info: [{"id": "808a9620-b31f-4e61-bb51-e2de4a3d3a7e", "address": "fa:16:3e:e1:dd:f7", "network": {"id": "be69fd15-aa3c-4e6e-9334-57674f1f2d81", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-543389568-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13a5a5169d8345a7a88fef5ff0ecd26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap808a9620-b3", "ovs_interfaceid": "808a9620-b31f-4e61-bb51-e2de4a3d3a7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.585481] env[62522]: DEBUG nova.compute.utils [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 928.588715] env[62522]: DEBUG nova.compute.manager [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 928.589081] env[62522]: DEBUG nova.network.neutron [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 928.618686] env[62522]: DEBUG oslo_vmware.api [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415715, 'name': PowerOnVM_Task, 'duration_secs': 1.160429} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.618947] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 928.619176] env[62522]: INFO nova.compute.manager [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Took 6.36 seconds to spawn the instance on the hypervisor. [ 928.619358] env[62522]: DEBUG nova.compute.manager [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 928.620165] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f20be4-f048-4296-a484-ac50abbc6f41 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.637643] env[62522]: DEBUG nova.policy [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab5e5a8e6ee64aad8d52342ee3f5af36', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4bdd1f5caf09454d808bcdc15df2d3a7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 928.717127] env[62522]: INFO nova.compute.manager [-] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Took 1.29 seconds to deallocate network for instance. [ 928.840499] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415716, 'name': CreateVM_Task, 'duration_secs': 1.250062} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.840666] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 928.841381] env[62522]: DEBUG oslo_concurrency.lockutils [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.841558] env[62522]: DEBUG oslo_concurrency.lockutils [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.841906] env[62522]: DEBUG oslo_concurrency.lockutils [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 928.842173] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39b9eccd-da37-493e-8a51-fe6c9ca00330 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.847426] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 928.847426] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52827bf5-7582-7afc-04e5-556c90583f93" [ 928.847426] env[62522]: _type = "Task" [ 928.847426] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.856222] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52827bf5-7582-7afc-04e5-556c90583f93, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.873993] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Acquiring lock "refresh_cache-0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.874205] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Acquired lock "refresh_cache-0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.874392] env[62522]: DEBUG nova.network.neutron [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 928.883778] env[62522]: DEBUG oslo_concurrency.lockutils [req-c741b858-b896-4829-af34-eead74e1a031 req-5af693b1-c5e2-40c2-970e-e811e6a96d0f service nova] Releasing lock "refresh_cache-8b21b749-b872-43f7-a2c5-aefee6c5f3a1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 928.902303] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "refresh_cache-74e52638-d284-4bd1-8cff-c7aca9426f75" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.902475] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquired lock "refresh_cache-74e52638-d284-4bd1-8cff-c7aca9426f75" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.902719] env[62522]: DEBUG nova.network.neutron [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 928.954041] env[62522]: DEBUG nova.network.neutron [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Successfully created port: 954fee91-36f2-497a-a856-6828a519a456 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 929.089782] env[62522]: DEBUG nova.compute.manager [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 929.142588] env[62522]: INFO nova.compute.manager [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Took 53.13 seconds to build instance. [ 929.224161] env[62522]: DEBUG oslo_concurrency.lockutils [None req-01dccc48-c48f-4870-8972-b57f018d667a tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.364113] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52827bf5-7582-7afc-04e5-556c90583f93, 'name': SearchDatastore_Task, 'duration_secs': 0.02809} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.364496] env[62522]: DEBUG oslo_concurrency.lockutils [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.364802] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 929.365120] env[62522]: DEBUG oslo_concurrency.lockutils [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.365321] env[62522]: DEBUG oslo_concurrency.lockutils [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.365557] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 929.365896] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c219f998-7a45-43cc-93a1-f12ee045ebdc {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.380097] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 929.380273] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 929.381709] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eddfc713-92b2-4945-9bda-2d2f7f06fb36 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.403021] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 929.403021] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52768b2f-7ec7-e0af-66f5-15089b71d6c5" [ 929.403021] env[62522]: _type = "Task" [ 929.403021] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.413652] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52768b2f-7ec7-e0af-66f5-15089b71d6c5, 'name': SearchDatastore_Task, 'duration_secs': 0.014459} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.418335] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15cd3c44-bea0-4e96-99d0-645a15a557e7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.428295] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 929.428295] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52137a35-705b-0094-4528-3459801f645b" [ 929.428295] env[62522]: _type = "Task" [ 929.428295] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.434787] env[62522]: DEBUG nova.network.neutron [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 929.440106] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52137a35-705b-0094-4528-3459801f645b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.595023] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c5ba2e-479c-4860-ab42-b5afe3244fa5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.603967] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a8c5b4-32c4-4548-a3fb-3962145d4a80 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.647505] env[62522]: DEBUG nova.network.neutron [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Updating instance_info_cache with network_info: [{"id": "50a9aa3e-b35e-4feb-b010-5e72dd8c2252", "address": "fa:16:3e:51:29:59", "network": {"id": "f2c8b263-09a3-413e-805d-6c242c8ea122", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-863952592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72db051c023d4293b22a8a58a517374c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50a9aa3e-b3", "ovs_interfaceid": "50a9aa3e-b35e-4feb-b010-5e72dd8c2252", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.650059] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4871e396-7fd3-4b41-b242-0bcc3884855d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.652853] env[62522]: DEBUG oslo_concurrency.lockutils [None req-810dc4df-e7c2-456d-bd2a-366a0576c67c tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Lock "72e054d2-79bb-4ef8-82d1-4e67ba0ef20a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.010s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.668654] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-bc76de02-3201-43e5-800f-eb05cbf0c203 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.685472] env[62522]: DEBUG nova.compute.provider_tree [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 929.700128] env[62522]: DEBUG nova.network.neutron [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Updating instance_info_cache with network_info: [{"id": "d830d64b-94fa-4bc8-a3e6-e45c4b0ae629", "address": "fa:16:3e:d4:80:4e", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.185", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd830d64b-94", "ovs_interfaceid": "d830d64b-94fa-4bc8-a3e6-e45c4b0ae629", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.754771] env[62522]: DEBUG nova.compute.manager [req-553769fc-a31a-4a69-a189-f412d32e85b1 req-a5ec8a9d-66e0-4d74-97ff-caee32903f2e service nova] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Received event network-vif-plugged-50a9aa3e-b35e-4feb-b010-5e72dd8c2252 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 929.755007] env[62522]: DEBUG oslo_concurrency.lockutils [req-553769fc-a31a-4a69-a189-f412d32e85b1 req-a5ec8a9d-66e0-4d74-97ff-caee32903f2e service nova] Acquiring lock "0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.755259] env[62522]: DEBUG oslo_concurrency.lockutils [req-553769fc-a31a-4a69-a189-f412d32e85b1 req-a5ec8a9d-66e0-4d74-97ff-caee32903f2e service nova] Lock "0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.755392] env[62522]: DEBUG oslo_concurrency.lockutils [req-553769fc-a31a-4a69-a189-f412d32e85b1 req-a5ec8a9d-66e0-4d74-97ff-caee32903f2e service nova] Lock "0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.755695] env[62522]: DEBUG nova.compute.manager [req-553769fc-a31a-4a69-a189-f412d32e85b1 req-a5ec8a9d-66e0-4d74-97ff-caee32903f2e service nova] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] No waiting events found dispatching network-vif-plugged-50a9aa3e-b35e-4feb-b010-5e72dd8c2252 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 929.755887] env[62522]: WARNING nova.compute.manager [req-553769fc-a31a-4a69-a189-f412d32e85b1 req-a5ec8a9d-66e0-4d74-97ff-caee32903f2e service nova] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Received unexpected event network-vif-plugged-50a9aa3e-b35e-4feb-b010-5e72dd8c2252 for instance with vm_state building and task_state spawning. [ 929.756077] env[62522]: DEBUG nova.compute.manager [req-553769fc-a31a-4a69-a189-f412d32e85b1 req-a5ec8a9d-66e0-4d74-97ff-caee32903f2e service nova] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Received event network-changed-50a9aa3e-b35e-4feb-b010-5e72dd8c2252 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 929.756266] env[62522]: DEBUG nova.compute.manager [req-553769fc-a31a-4a69-a189-f412d32e85b1 req-a5ec8a9d-66e0-4d74-97ff-caee32903f2e service nova] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Refreshing instance network info cache due to event network-changed-50a9aa3e-b35e-4feb-b010-5e72dd8c2252. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 929.756441] env[62522]: DEBUG oslo_concurrency.lockutils [req-553769fc-a31a-4a69-a189-f412d32e85b1 req-a5ec8a9d-66e0-4d74-97ff-caee32903f2e service nova] Acquiring lock "refresh_cache-0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.943630] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52137a35-705b-0094-4528-3459801f645b, 'name': SearchDatastore_Task, 'duration_secs': 0.023335} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.943959] env[62522]: DEBUG oslo_concurrency.lockutils [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.944258] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 8b21b749-b872-43f7-a2c5-aefee6c5f3a1/8b21b749-b872-43f7-a2c5-aefee6c5f3a1.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 929.944536] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ebea185-85f9-48db-8a0a-c82939620080 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.964076] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 929.964076] env[62522]: value = "task-2415717" [ 929.964076] env[62522]: _type = "Task" [ 929.964076] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.978813] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415717, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.103067] env[62522]: DEBUG nova.compute.manager [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 930.125824] env[62522]: DEBUG nova.virt.hardware [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 930.126909] env[62522]: DEBUG nova.virt.hardware [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 930.126909] env[62522]: DEBUG nova.virt.hardware [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 930.126909] env[62522]: DEBUG nova.virt.hardware [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 930.126909] env[62522]: DEBUG nova.virt.hardware [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 930.126909] env[62522]: DEBUG nova.virt.hardware [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 930.126909] env[62522]: DEBUG nova.virt.hardware [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 930.127221] env[62522]: DEBUG nova.virt.hardware [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 930.127259] 
env[62522]: DEBUG nova.virt.hardware [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 930.127452] env[62522]: DEBUG nova.virt.hardware [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 930.127657] env[62522]: DEBUG nova.virt.hardware [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 930.128642] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-839f2a9d-be11-4b3a-9deb-5824843c1199 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.137407] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa4ae60-f3cc-4882-9a62-7f748de968a6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.153712] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Releasing lock "refresh_cache-0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.154074] env[62522]: DEBUG nova.compute.manager [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Instance network_info: |[{"id": "50a9aa3e-b35e-4feb-b010-5e72dd8c2252", "address": "fa:16:3e:51:29:59", "network": {"id": "f2c8b263-09a3-413e-805d-6c242c8ea122", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-863952592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72db051c023d4293b22a8a58a517374c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50a9aa3e-b3", "ovs_interfaceid": "50a9aa3e-b35e-4feb-b010-5e72dd8c2252", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 930.154621] env[62522]: DEBUG 
oslo_concurrency.lockutils [req-553769fc-a31a-4a69-a189-f412d32e85b1 req-a5ec8a9d-66e0-4d74-97ff-caee32903f2e service nova] Acquired lock "refresh_cache-0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.154805] env[62522]: DEBUG nova.network.neutron [req-553769fc-a31a-4a69-a189-f412d32e85b1 req-a5ec8a9d-66e0-4d74-97ff-caee32903f2e service nova] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Refreshing network info cache for port 50a9aa3e-b35e-4feb-b010-5e72dd8c2252 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 930.156090] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:29:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '50a9aa3e-b35e-4feb-b010-5e72dd8c2252', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 930.163518] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Creating folder: Project (72db051c023d4293b22a8a58a517374c). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 930.164246] env[62522]: INFO nova.compute.manager [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Rebuilding instance [ 930.166325] env[62522]: DEBUG nova.compute.manager [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 930.169341] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b8ab71e5-7aa5-4987-9aa0-137d1c437d01 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.184310] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Created folder: Project (72db051c023d4293b22a8a58a517374c) in parent group-v489562. [ 930.184459] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Creating folder: Instances. Parent ref: group-v489751. 
{{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 930.186833] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f805c0c4-99fd-40eb-9fe3-0b59753c0d66 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.202864] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Releasing lock "refresh_cache-74e52638-d284-4bd1-8cff-c7aca9426f75" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.212029] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Created folder: Instances in parent group-v489751. [ 930.212029] env[62522]: DEBUG oslo.service.loopingcall [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 930.212029] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 930.212029] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a1ec6cb7-4d21-4ed1-b467-af84a97b930e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.230687] env[62522]: ERROR nova.scheduler.client.report [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [req-00dbe89b-729c-40ab-8a2a-dd06b06173fb] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c7fa38b2-245d-4337-a012-22c1a01c0a72. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-00dbe89b-729c-40ab-8a2a-dd06b06173fb"}]} [ 930.240401] env[62522]: DEBUG nova.compute.manager [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 930.241422] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa98aed-c211-4d0e-97bb-32a8457775e6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.247083] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 930.247083] env[62522]: value = "task-2415720" [ 930.247083] env[62522]: _type = "Task" [ 930.247083] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.264019] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415720, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.265985] env[62522]: DEBUG nova.scheduler.client.report [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Refreshing inventories for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 930.283760] env[62522]: DEBUG nova.scheduler.client.report [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Updating ProviderTree inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 930.284582] env[62522]: DEBUG nova.compute.provider_tree [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 930.299760] env[62522]: DEBUG nova.scheduler.client.report [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Refreshing aggregate associations for resource provider 
c7fa38b2-245d-4337-a012-22c1a01c0a72, aggregates: None {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 930.324619] env[62522]: DEBUG nova.scheduler.client.report [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Refreshing trait associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 930.477727] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415717, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.514923] env[62522]: DEBUG nova.network.neutron [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Successfully updated port: 954fee91-36f2-497a-a856-6828a519a456 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 930.574771] env[62522]: DEBUG nova.compute.manager [req-1ba7f443-b33e-4638-aa7d-38ebecea0409 req-939d7c14-aaf7-4f42-a1d3-fded9a2f95a8 service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Received event network-vif-plugged-954fee91-36f2-497a-a856-6828a519a456 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 930.574954] env[62522]: DEBUG oslo_concurrency.lockutils [req-1ba7f443-b33e-4638-aa7d-38ebecea0409 req-939d7c14-aaf7-4f42-a1d3-fded9a2f95a8 service nova] Acquiring lock "fcd0eef6-d059-4495-a982-058b6c9626d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.575223] env[62522]: DEBUG oslo_concurrency.lockutils [req-1ba7f443-b33e-4638-aa7d-38ebecea0409 req-939d7c14-aaf7-4f42-a1d3-fded9a2f95a8 service nova] Lock "fcd0eef6-d059-4495-a982-058b6c9626d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.575402] env[62522]: DEBUG oslo_concurrency.lockutils [req-1ba7f443-b33e-4638-aa7d-38ebecea0409 req-939d7c14-aaf7-4f42-a1d3-fded9a2f95a8 service nova] Lock "fcd0eef6-d059-4495-a982-058b6c9626d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.575566] env[62522]: DEBUG nova.compute.manager [req-1ba7f443-b33e-4638-aa7d-38ebecea0409 req-939d7c14-aaf7-4f42-a1d3-fded9a2f95a8 service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] No waiting events found dispatching network-vif-plugged-954fee91-36f2-497a-a856-6828a519a456 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 930.575728] env[62522]: WARNING nova.compute.manager [req-1ba7f443-b33e-4638-aa7d-38ebecea0409 req-939d7c14-aaf7-4f42-a1d3-fded9a2f95a8 service nova] [instance: 
fcd0eef6-d059-4495-a982-058b6c9626d1] Received unexpected event network-vif-plugged-954fee91-36f2-497a-a856-6828a519a456 for instance with vm_state building and task_state spawning. [ 930.690155] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.730310] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e8df840-a080-447e-bee1-43c637d168b5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.761701] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e72c45be-711e-4c78-a3ee-2fd0bfe81d34 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.772832] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Updating instance '74e52638-d284-4bd1-8cff-c7aca9426f75' progress to 83 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 930.781172] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415720, 'name': CreateVM_Task, 'duration_secs': 0.450812} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.783847] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 930.785917] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.785917] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.785917] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 930.787895] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92599188-0707-44da-b6b0-7079196d9171 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
930.794867] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Waiting for the task: (returnval){ [ 930.794867] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a867bb-4683-f82c-65c8-bb78f6986f8f" [ 930.794867] env[62522]: _type = "Task" [ 930.794867] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.808207] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a867bb-4683-f82c-65c8-bb78f6986f8f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.872529] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb1a5a5-1a2a-4c99-89c7-922162121581 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.880864] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e72da36d-473f-44dc-b348-8414527d3de6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.916277] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9b4cd5-b1ad-469c-8fb1-119aabd45bc0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.925028] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e6d4a3-e636-4ee1-a0be-0dcb741282ff {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.940647] env[62522]: DEBUG nova.compute.provider_tree [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 930.957936] env[62522]: DEBUG nova.network.neutron [req-553769fc-a31a-4a69-a189-f412d32e85b1 req-a5ec8a9d-66e0-4d74-97ff-caee32903f2e service nova] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Updated VIF entry in instance network info cache for port 50a9aa3e-b35e-4feb-b010-5e72dd8c2252. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 930.958356] env[62522]: DEBUG nova.network.neutron [req-553769fc-a31a-4a69-a189-f412d32e85b1 req-a5ec8a9d-66e0-4d74-97ff-caee32903f2e service nova] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Updating instance_info_cache with network_info: [{"id": "50a9aa3e-b35e-4feb-b010-5e72dd8c2252", "address": "fa:16:3e:51:29:59", "network": {"id": "f2c8b263-09a3-413e-805d-6c242c8ea122", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-863952592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72db051c023d4293b22a8a58a517374c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50a9aa3e-b3", "ovs_interfaceid": "50a9aa3e-b35e-4feb-b010-5e72dd8c2252", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.976489] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415717, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.562319} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.976768] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 8b21b749-b872-43f7-a2c5-aefee6c5f3a1/8b21b749-b872-43f7-a2c5-aefee6c5f3a1.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 930.976989] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 930.979214] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-17a17555-2fa4-425a-b184-9ddfef2db8e3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.987319] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 930.987319] env[62522]: value = "task-2415721" [ 930.987319] env[62522]: _type = "Task" [ 930.987319] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.992958] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fe1ef7-6923-016a-4acd-d4651b549f6f/disk-0.vmdk. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 930.994283] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0faf1b11-c26d-4c31-98c2-e94b816a6eee {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.999604] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415721, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.003451] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fe1ef7-6923-016a-4acd-d4651b549f6f/disk-0.vmdk is in state: ready. 
{{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 931.003641] env[62522]: ERROR oslo_vmware.rw_handles [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fe1ef7-6923-016a-4acd-d4651b549f6f/disk-0.vmdk due to incomplete transfer. [ 931.003886] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d49d204c-d71c-4712-8ff6-09722fb23cf3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.011584] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fe1ef7-6923-016a-4acd-d4651b549f6f/disk-0.vmdk. {{(pid=62522) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 931.011871] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Uploaded image a208a1a2-a7f4-4e1b-ac0b-c961a531fddd to the Glance image server {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 931.013654] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Destroying the VM {{(pid=62522) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 931.013926] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-bc441810-ace6-4e4a-989a-d6d9eaa2c4ff {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.017209] env[62522]: DEBUG oslo_concurrency.lockutils [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "refresh_cache-fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.017295] env[62522]: DEBUG oslo_concurrency.lockutils [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "refresh_cache-fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.017406] env[62522]: DEBUG nova.network.neutron [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 931.020626] env[62522]: DEBUG oslo_vmware.api [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] 
Waiting for the task: (returnval){ [ 931.020626] env[62522]: value = "task-2415722" [ 931.020626] env[62522]: _type = "Task" [ 931.020626] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.031213] env[62522]: DEBUG oslo_vmware.api [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415722, 'name': Destroy_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.267809] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 931.268106] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-665a86f5-9b7a-47bc-bb13-2a2669b1f0e7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.277819] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 931.277819] env[62522]: value = "task-2415723" [ 931.277819] env[62522]: _type = "Task" [ 931.277819] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.286393] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 931.290850] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-11d33f82-7232-4197-8fa1-4edaf1e32828 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.292991] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415723, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.300309] env[62522]: DEBUG oslo_vmware.api [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 931.300309] env[62522]: value = "task-2415724" [ 931.300309] env[62522]: _type = "Task" [ 931.300309] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.309465] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a867bb-4683-f82c-65c8-bb78f6986f8f, 'name': SearchDatastore_Task, 'duration_secs': 0.012335} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.310158] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.310439] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 931.310698] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.310865] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.311062] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 931.311333] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e49d1103-d817-4fc7-be14-39a14062fede {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.315975] env[62522]: DEBUG oslo_vmware.api [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415724, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.324650] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 931.324840] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 931.325643] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2081fa2e-00aa-4f9a-b855-a96d0e676a12 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.331328] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Waiting for the task: (returnval){ [ 931.331328] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52343613-8afa-a928-3b1d-412613c4517c" [ 931.331328] env[62522]: _type = "Task" [ 931.331328] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.340242] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52343613-8afa-a928-3b1d-412613c4517c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.460808] env[62522]: DEBUG oslo_concurrency.lockutils [req-553769fc-a31a-4a69-a189-f412d32e85b1 req-a5ec8a9d-66e0-4d74-97ff-caee32903f2e service nova] Releasing lock "refresh_cache-0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.478356] env[62522]: DEBUG nova.scheduler.client.report [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Updated inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with generation 98 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 931.478632] env[62522]: DEBUG nova.compute.provider_tree [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Updating resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 generation from 98 to 99 during operation: update_inventory {{(pid=62522) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 931.478828] env[62522]: DEBUG nova.compute.provider_tree [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 931.497458] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415721, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073857} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.497726] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 931.498551] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f524bf1a-eee1-4e9e-b085-e97ff5a236b9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.522205] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 8b21b749-b872-43f7-a2c5-aefee6c5f3a1/8b21b749-b872-43f7-a2c5-aefee6c5f3a1.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 931.524993] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe69e3dc-5fc6-42b8-96df-3c75eb7f9374 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.549219] env[62522]: DEBUG oslo_vmware.api [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415722, 'name': Destroy_Task} progress is 33%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.550788] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 931.550788] env[62522]: value = "task-2415725" [ 931.550788] env[62522]: _type = "Task" [ 931.550788] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.559230] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415725, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.571171] env[62522]: DEBUG nova.network.neutron [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 931.724366] env[62522]: DEBUG nova.network.neutron [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Updating instance_info_cache with network_info: [{"id": "954fee91-36f2-497a-a856-6828a519a456", "address": "fa:16:3e:df:f4:48", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap954fee91-36", "ovs_interfaceid": "954fee91-36f2-497a-a856-6828a519a456", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.788634] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415723, 'name': PowerOffVM_Task, 'duration_secs': 0.136841} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.788940] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 931.789192] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 931.790015] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1efcd61d-2e50-434f-b1a8-b2b36c81230b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.798346] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 931.798600] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b47ec62c-12ac-4fa5-ad7c-c2e46f965dff {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.809922] env[62522]: DEBUG oslo_vmware.api [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415724, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.828438] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 931.828699] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 931.828900] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Deleting the datastore file [datastore1] 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 931.829208] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b69d1a40-1544-4597-9557-150134d423b0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.836697] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 931.836697] env[62522]: value = "task-2415727" [ 931.836697] env[62522]: _type = "Task" [ 931.836697] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.843177] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52343613-8afa-a928-3b1d-412613c4517c, 'name': SearchDatastore_Task, 'duration_secs': 0.013227} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.844385] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46edf770-36b4-4015-9619-142dee4cfadd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.849585] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415727, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.853676] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Waiting for the task: (returnval){ [ 931.853676] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52683154-d7b6-f361-66f8-70cbb7937f4b" [ 931.853676] env[62522]: _type = "Task" [ 931.853676] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.861772] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52683154-d7b6-f361-66f8-70cbb7937f4b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.984561] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.907s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.985135] env[62522]: DEBUG nova.compute.manager [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 931.989014] env[62522]: DEBUG oslo_concurrency.lockutils [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.835s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.989155] env[62522]: DEBUG nova.objects.instance [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Lazy-loading 'resources' on Instance uuid cce5f0d4-364d-4295-a27d-44ca8585f803 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 932.037465] env[62522]: DEBUG oslo_vmware.api [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415722, 'name': Destroy_Task, 'duration_secs': 0.630768} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.038325] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Destroyed the VM [ 932.038578] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Deleting Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 932.038869] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d6b0d57d-394c-4823-ad05-56694e6bc048 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.048320] env[62522]: DEBUG oslo_vmware.api [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 932.048320] env[62522]: value = "task-2415728" [ 932.048320] env[62522]: _type = "Task" [ 932.048320] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.061373] env[62522]: DEBUG oslo_vmware.api [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415728, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.064875] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415725, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.227110] env[62522]: DEBUG oslo_concurrency.lockutils [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "refresh_cache-fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.227514] env[62522]: DEBUG nova.compute.manager [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Instance network_info: |[{"id": "954fee91-36f2-497a-a856-6828a519a456", "address": "fa:16:3e:df:f4:48", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap954fee91-36", "ovs_interfaceid": "954fee91-36f2-497a-a856-6828a519a456", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 932.228023] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:f4:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee555dfd-3d1a-4220-89cd-ffba64e4acf0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '954fee91-36f2-497a-a856-6828a519a456', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 932.235852] env[62522]: DEBUG oslo.service.loopingcall [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 932.236107] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 932.236340] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d1211603-2ca5-4c80-bf4e-41f636fab91d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.256701] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 932.256701] env[62522]: value = "task-2415729" [ 932.256701] env[62522]: _type = "Task" [ 932.256701] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.265239] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415729, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.311439] env[62522]: DEBUG oslo_vmware.api [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415724, 'name': PowerOnVM_Task, 'duration_secs': 0.723032} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.311727] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 932.311966] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ce043770-82c9-4ee6-b535-d9218f560880 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Updating instance '74e52638-d284-4bd1-8cff-c7aca9426f75' progress to 100 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 932.347183] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415727, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199927} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.347451] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 932.347635] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 932.347819] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 932.365176] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52683154-d7b6-f361-66f8-70cbb7937f4b, 'name': SearchDatastore_Task, 'duration_secs': 0.028476} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.365176] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.365376] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c/0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 932.365635] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0660a210-b472-4d6f-adf9-f653e87cb0ed {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.374390] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Waiting for the task: (returnval){ [ 932.374390] env[62522]: value = "task-2415730" [ 932.374390] env[62522]: _type = "Task" [ 932.374390] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.384275] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Task: {'id': task-2415730, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.492645] env[62522]: DEBUG nova.compute.utils [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 932.497276] env[62522]: DEBUG nova.compute.manager [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 932.497448] env[62522]: DEBUG nova.network.neutron [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 932.542819] env[62522]: DEBUG nova.policy [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2915209e81104e9eb3f4446a9d85fd57', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3ca7e42d226a4ef6b48b882356da8950', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 932.567599] env[62522]: DEBUG oslo_vmware.api [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415728, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.567860] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415725, 'name': ReconfigVM_Task, 'duration_secs': 0.68404} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.568121] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 8b21b749-b872-43f7-a2c5-aefee6c5f3a1/8b21b749-b872-43f7-a2c5-aefee6c5f3a1.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 932.568789] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3e6e6e6c-8fc5-438b-b6cf-21db8826aa2f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.590469] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 932.590469] env[62522]: value = "task-2415731" [ 932.590469] env[62522]: _type = "Task" [ 932.590469] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.602417] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415731, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.615988] env[62522]: DEBUG nova.compute.manager [req-1df9480a-54a9-4951-bf5c-5bae93104d47 req-06846518-2c9d-491a-be9b-5e5e71fad42f service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Received event network-changed-954fee91-36f2-497a-a856-6828a519a456 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 932.616223] env[62522]: DEBUG nova.compute.manager [req-1df9480a-54a9-4951-bf5c-5bae93104d47 req-06846518-2c9d-491a-be9b-5e5e71fad42f service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Refreshing instance network info cache due to event network-changed-954fee91-36f2-497a-a856-6828a519a456. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 932.616467] env[62522]: DEBUG oslo_concurrency.lockutils [req-1df9480a-54a9-4951-bf5c-5bae93104d47 req-06846518-2c9d-491a-be9b-5e5e71fad42f service nova] Acquiring lock "refresh_cache-fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.616618] env[62522]: DEBUG oslo_concurrency.lockutils [req-1df9480a-54a9-4951-bf5c-5bae93104d47 req-06846518-2c9d-491a-be9b-5e5e71fad42f service nova] Acquired lock "refresh_cache-fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.616948] env[62522]: DEBUG nova.network.neutron [req-1df9480a-54a9-4951-bf5c-5bae93104d47 req-06846518-2c9d-491a-be9b-5e5e71fad42f service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Refreshing network info cache for port 954fee91-36f2-497a-a856-6828a519a456 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 932.768172] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415729, 'name': CreateVM_Task} progress is 25%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.895336] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Task: {'id': task-2415730, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.896460] env[62522]: DEBUG nova.network.neutron [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Successfully created port: 5ede0b01-ce31-4403-9ce2-41d300d0c750 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 932.997777] env[62522]: DEBUG nova.compute.manager [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 933.059890] env[62522]: DEBUG oslo_vmware.api [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415728, 'name': RemoveSnapshot_Task, 'duration_secs': 0.647595} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.059890] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Deleted Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 933.059890] env[62522]: INFO nova.compute.manager [None req-a8533efa-a18c-4851-88ce-7340728dfd7b tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Took 15.80 seconds to snapshot the instance on the hypervisor. [ 933.063736] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c263a51b-3351-4093-9071-f5928061f1e6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.071814] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa58797-8989-495d-9de0-01e14da8b61f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.109340] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e582f9e-8ad8-4a45-a216-7d39f221e712 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.119653] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415731, 'name': Rename_Task, 'duration_secs': 0.291256} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.120287] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 933.121513] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c4abf49-a0cb-4c26-94b0-8e108821d039 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.127166] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c74ea7e5-89e9-40e9-bd8f-13e075c4822b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.136384] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 933.136384] env[62522]: value = "task-2415732" [ 933.136384] env[62522]: _type = "Task" [ 933.136384] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.146263] env[62522]: DEBUG nova.compute.provider_tree [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 933.158260] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415732, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.244387] env[62522]: DEBUG nova.network.neutron [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Successfully created port: cac77d67-105b-49eb-9b57-8c60abb3165a {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 933.268801] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415729, 'name': CreateVM_Task, 'duration_secs': 0.748572} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.269095] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 933.269827] env[62522]: DEBUG oslo_concurrency.lockutils [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.270046] env[62522]: DEBUG oslo_concurrency.lockutils [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.270538] env[62522]: DEBUG oslo_concurrency.lockutils [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 933.272875] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29fbef63-6209-48da-afe2-ca4222cfda0a {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.278431] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 933.278431] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529ef161-ab71-aa2c-12b5-dc0f01ab7b52" [ 933.278431] env[62522]: _type = "Task" [ 933.278431] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.287853] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529ef161-ab71-aa2c-12b5-dc0f01ab7b52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.393134] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Task: {'id': task-2415730, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.604487} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.397398] env[62522]: DEBUG nova.virt.hardware [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 933.398069] env[62522]: DEBUG nova.virt.hardware [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 933.398221] env[62522]: DEBUG nova.virt.hardware [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 933.398880] env[62522]: DEBUG nova.virt.hardware [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 933.399116] env[62522]: DEBUG nova.virt.hardware [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 
tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 933.399282] env[62522]: DEBUG nova.virt.hardware [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 933.399499] env[62522]: DEBUG nova.virt.hardware [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 933.399662] env[62522]: DEBUG nova.virt.hardware [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 933.399833] env[62522]: DEBUG nova.virt.hardware [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 933.399994] env[62522]: DEBUG nova.virt.hardware [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 933.400506] env[62522]: DEBUG nova.virt.hardware [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 933.400984] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c/0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 933.401779] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 933.404231] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de00be80-c69e-405d-9897-0e76857d6623 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.409501] env[62522]: DEBUG oslo_vmware.service [-] Invoking 
VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a436e3aa-e5d0-4968-8006-1c567dbca3c2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.421069] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be005b18-8baf-496c-8b2b-3b8282de1137 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.426099] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Waiting for the task: (returnval){ [ 933.426099] env[62522]: value = "task-2415733" [ 933.426099] env[62522]: _type = "Task" [ 933.426099] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.439025] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Instance VIF info [] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 933.443288] env[62522]: DEBUG oslo.service.loopingcall [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 933.446326] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 933.446567] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-663679d6-2452-46ff-a44e-d79ac911ff47 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.463420] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Task: {'id': task-2415733, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.470309] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 933.470309] env[62522]: value = "task-2415734" [ 933.470309] env[62522]: _type = "Task" [ 933.470309] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.479097] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415734, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.521484] env[62522]: DEBUG nova.network.neutron [req-1df9480a-54a9-4951-bf5c-5bae93104d47 req-06846518-2c9d-491a-be9b-5e5e71fad42f service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Updated VIF entry in instance network info cache for port 954fee91-36f2-497a-a856-6828a519a456. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 933.521867] env[62522]: DEBUG nova.network.neutron [req-1df9480a-54a9-4951-bf5c-5bae93104d47 req-06846518-2c9d-491a-be9b-5e5e71fad42f service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Updating instance_info_cache with network_info: [{"id": "954fee91-36f2-497a-a856-6828a519a456", "address": "fa:16:3e:df:f4:48", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap954fee91-36", "ovs_interfaceid": "954fee91-36f2-497a-a856-6828a519a456", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.660936] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415732, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.688778] env[62522]: DEBUG nova.scheduler.client.report [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Updated inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with generation 99 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 933.689018] env[62522]: DEBUG nova.compute.provider_tree [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Updating resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 generation from 99 to 100 during operation: update_inventory {{(pid=62522) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 933.689229] env[62522]: DEBUG nova.compute.provider_tree [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 933.789914] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529ef161-ab71-aa2c-12b5-dc0f01ab7b52, 'name': SearchDatastore_Task, 'duration_secs': 0.017103} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.790295] env[62522]: DEBUG oslo_concurrency.lockutils [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.790599] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 933.790890] env[62522]: DEBUG oslo_concurrency.lockutils [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.791105] env[62522]: DEBUG oslo_concurrency.lockutils [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.791357] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 933.791711] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f3e5026e-169e-48de-b833-9022dcfd58d5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.804658] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 933.804958] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 933.805894] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e308632-2fec-4bc1-9df0-17184982ffd8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.811909] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 933.811909] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fc39be-8ca3-65d6-fc2d-1571f53b1fb0" [ 933.811909] env[62522]: _type = "Task" [ 933.811909] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.820602] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fc39be-8ca3-65d6-fc2d-1571f53b1fb0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.936581] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Task: {'id': task-2415733, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.154145} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.936872] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 933.937682] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33655df4-375a-4439-b526-fa46a6cb91c8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.961056] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c/0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 933.961405] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e1174075-ea3b-41a9-b60b-c9e291e43803 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.985555] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415734, 'name': CreateVM_Task, 'duration_secs': 0.437366} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.986762] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 933.987128] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Waiting for the task: (returnval){ [ 933.987128] env[62522]: value = "task-2415735" [ 933.987128] env[62522]: _type = "Task" [ 933.987128] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.987512] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.987675] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.987988] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 933.988353] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5b159c1-f0b9-4bd1-921c-c57682a120af {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.998261] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Task: {'id': task-2415735, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.999451] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 933.999451] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520b0c3b-56e1-202e-6404-85ae08048031" [ 933.999451] env[62522]: _type = "Task" [ 933.999451] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.009715] env[62522]: DEBUG nova.compute.manager [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 934.010911] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520b0c3b-56e1-202e-6404-85ae08048031, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.026155] env[62522]: DEBUG oslo_concurrency.lockutils [req-1df9480a-54a9-4951-bf5c-5bae93104d47 req-06846518-2c9d-491a-be9b-5e5e71fad42f service nova] Releasing lock "refresh_cache-fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 934.040986] env[62522]: DEBUG nova.virt.hardware [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 934.041283] env[62522]: DEBUG nova.virt.hardware [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 934.041504] env[62522]: DEBUG nova.virt.hardware [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 934.041736] env[62522]: DEBUG nova.virt.hardware [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 934.041926] env[62522]: DEBUG nova.virt.hardware [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 934.042116] env[62522]: DEBUG nova.virt.hardware [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 934.042394] env[62522]: DEBUG nova.virt.hardware [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad 
tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 934.042612] env[62522]: DEBUG nova.virt.hardware [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 934.042821] env[62522]: DEBUG nova.virt.hardware [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 934.043446] env[62522]: DEBUG nova.virt.hardware [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 934.043446] env[62522]: DEBUG nova.virt.hardware [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 934.044383] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3803f4f8-ed90-4faf-bf00-817bbb87b3de {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.055485] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a207474-d61e-4b1d-807a-e64bd1688a3d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.157647] env[62522]: DEBUG oslo_vmware.api [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415732, 'name': PowerOnVM_Task, 'duration_secs': 0.945482} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.158019] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 934.158275] env[62522]: INFO nova.compute.manager [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Took 9.27 seconds to spawn the instance on the hypervisor. 
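The CopyVirtualDisk_Task, Rename_Task, ExtendVirtualDisk_Task, and PowerOnVM_Task records above all follow the same wait_for_task pattern: a vCenter task is created, then polled until it completes, with each poll logged as a progress percentage and the final record carrying 'duration_secs'. The following is a minimal illustrative sketch of that poll-until-done loop; the poll_task_info helper, the returned dict shape, and the 0.5 s interval are assumptions made for illustration and are not the actual oslo.vmware API.

    # Illustrative sketch of the poll-until-complete pattern seen in the
    # wait_for_task / _poll_task records above. Helper names, the result
    # dict shape, and the poll interval are assumptions, not oslo.vmware.
    import time


    class TaskFailed(Exception):
        """Raised when the vCenter-style task ends in an error state."""


    def wait_for_task(poll_task_info, task_id, interval=0.5):
        """Poll a task until it finishes.

        poll_task_info(task_id) is assumed to return a dict such as
        {'state': 'running', 'progress': 51} or
        {'state': 'success', 'duration_secs': 0.604487}.
        """
        started = time.monotonic()
        while True:
            info = poll_task_info(task_id)       # one _poll_task round trip
            state = info.get('state')
            if state == 'success':
                # Final record: "completed successfully" with duration_secs.
                info.setdefault('duration_secs', time.monotonic() - started)
                return info
            if state == 'error':
                raise TaskFailed(info.get('error', 'unknown error'))
            # Still running: corresponds to the repeated "progress is N%" records.
            print(f"Task {task_id} progress is {info.get('progress', 0)}%")
            time.sleep(interval)

Under those assumptions, the same loop accounts for the repeated "progress is N%" polls and the single "completed successfully" record with 'duration_secs' that each task above produces.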
[ 934.158505] env[62522]: DEBUG nova.compute.manager [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 934.159722] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817af4c5-f9fe-412c-88bd-064767359bbc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.195562] env[62522]: DEBUG oslo_concurrency.lockutils [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.207s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.198044] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 34.380s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.198249] env[62522]: DEBUG nova.objects.instance [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62522) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 934.231058] env[62522]: INFO nova.scheduler.client.report [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Deleted allocations for instance cce5f0d4-364d-4295-a27d-44ca8585f803 [ 934.323632] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fc39be-8ca3-65d6-fc2d-1571f53b1fb0, 'name': SearchDatastore_Task, 'duration_secs': 0.024554} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.324481] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8b9c709-7041-490b-8efe-f497842f2d59 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.330720] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 934.330720] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52932bc5-fbf8-996c-3c5c-0560076a7dcd" [ 934.330720] env[62522]: _type = "Task" [ 934.330720] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.339284] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52932bc5-fbf8-996c-3c5c-0560076a7dcd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.499479] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Task: {'id': task-2415735, 'name': ReconfigVM_Task, 'duration_secs': 0.345515} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.499956] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Reconfigured VM instance instance-00000044 to attach disk [datastore2] 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c/0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 934.504599] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d12bd0cd-f3fd-42d7-88f3-1f4402b1339b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.513043] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520b0c3b-56e1-202e-6404-85ae08048031, 'name': SearchDatastore_Task, 'duration_secs': 0.019855} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.514458] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 934.514705] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 934.514915] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 934.515222] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Waiting for the task: (returnval){ [ 934.515222] env[62522]: value = "task-2415736" [ 934.515222] env[62522]: _type = "Task" [ 934.515222] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.524080] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Task: {'id': task-2415736, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.688933] env[62522]: INFO nova.compute.manager [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Took 54.73 seconds to build instance. [ 934.741098] env[62522]: DEBUG oslo_concurrency.lockutils [None req-72637f6f-3b91-40d0-a94d-163719fdb9bc tempest-SecurityGroupsTestJSON-1761875515 tempest-SecurityGroupsTestJSON-1761875515-project-member] Lock "cce5f0d4-364d-4295-a27d-44ca8585f803" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.124s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.845232] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52932bc5-fbf8-996c-3c5c-0560076a7dcd, 'name': SearchDatastore_Task, 'duration_secs': 0.054399} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.845232] env[62522]: DEBUG oslo_concurrency.lockutils [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 934.845232] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] fcd0eef6-d059-4495-a982-058b6c9626d1/fcd0eef6-d059-4495-a982-058b6c9626d1.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 934.845232] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.845232] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 934.845232] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0e75f461-851f-41df-879e-4cd11ac91347 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.848042] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa905711-10f7-4ad3-9fd5-7955abb21699 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.856127] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 934.856127] env[62522]: value = "task-2415737" [ 934.856127] env[62522]: _type = "Task" [ 934.856127] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.860397] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 934.860651] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 934.861712] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbcd6038-86ae-4233-8f72-0211d62bdcd4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.870878] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415737, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.876133] env[62522]: DEBUG nova.network.neutron [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Port d830d64b-94fa-4bc8-a3e6-e45c4b0ae629 binding to destination host cpu-1 is already ACTIVE {{(pid=62522) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 934.876398] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "refresh_cache-74e52638-d284-4bd1-8cff-c7aca9426f75" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 934.876549] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquired lock "refresh_cache-74e52638-d284-4bd1-8cff-c7aca9426f75" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.876727] env[62522]: DEBUG nova.network.neutron [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 934.878087] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 934.878087] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5211fee1-9ac0-bade-5ddd-a6e5694d84cf" [ 934.878087] env[62522]: _type = "Task" [ 934.878087] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.888075] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5211fee1-9ac0-bade-5ddd-a6e5694d84cf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.919863] env[62522]: DEBUG nova.compute.manager [req-ea816037-24e1-4b7a-9186-a413742f020f req-30598cf1-b5c4-4a58-b430-053b636fce76 service nova] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Received event network-vif-plugged-5ede0b01-ce31-4403-9ce2-41d300d0c750 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 934.920100] env[62522]: DEBUG oslo_concurrency.lockutils [req-ea816037-24e1-4b7a-9186-a413742f020f req-30598cf1-b5c4-4a58-b430-053b636fce76 service nova] Acquiring lock "17ec01e7-9735-4771-a73c-c4c7634d59f1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.920353] env[62522]: DEBUG oslo_concurrency.lockutils [req-ea816037-24e1-4b7a-9186-a413742f020f req-30598cf1-b5c4-4a58-b430-053b636fce76 service nova] Lock "17ec01e7-9735-4771-a73c-c4c7634d59f1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.920537] env[62522]: DEBUG oslo_concurrency.lockutils [req-ea816037-24e1-4b7a-9186-a413742f020f req-30598cf1-b5c4-4a58-b430-053b636fce76 service nova] Lock "17ec01e7-9735-4771-a73c-c4c7634d59f1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.920793] env[62522]: DEBUG nova.compute.manager [req-ea816037-24e1-4b7a-9186-a413742f020f req-30598cf1-b5c4-4a58-b430-053b636fce76 service nova] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] No waiting events found dispatching network-vif-plugged-5ede0b01-ce31-4403-9ce2-41d300d0c750 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 934.920964] env[62522]: WARNING nova.compute.manager [req-ea816037-24e1-4b7a-9186-a413742f020f req-30598cf1-b5c4-4a58-b430-053b636fce76 service nova] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Received unexpected event network-vif-plugged-5ede0b01-ce31-4403-9ce2-41d300d0c750 for instance with vm_state building and task_state spawning. [ 935.005066] env[62522]: DEBUG nova.network.neutron [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Successfully updated port: 5ede0b01-ce31-4403-9ce2-41d300d0c750 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 935.026357] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Task: {'id': task-2415736, 'name': Rename_Task, 'duration_secs': 0.157567} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.026610] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 935.027096] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-90b65fd4-87c6-472a-be83-01d298b6982d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.037018] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Waiting for the task: (returnval){ [ 935.037018] env[62522]: value = "task-2415738" [ 935.037018] env[62522]: _type = "Task" [ 935.037018] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.043343] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Task: {'id': task-2415738, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.174289] env[62522]: DEBUG oslo_concurrency.lockutils [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquiring lock "8b21b749-b872-43f7-a2c5-aefee6c5f3a1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.193799] env[62522]: DEBUG oslo_concurrency.lockutils [None req-369c5f96-3ac8-4ae3-a3cb-148bc1853333 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "8b21b749-b872-43f7-a2c5-aefee6c5f3a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.070s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.194272] env[62522]: DEBUG oslo_concurrency.lockutils [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "8b21b749-b872-43f7-a2c5-aefee6c5f3a1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.020s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.194643] env[62522]: DEBUG oslo_concurrency.lockutils [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquiring lock "8b21b749-b872-43f7-a2c5-aefee6c5f3a1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.196036] env[62522]: DEBUG 
oslo_concurrency.lockutils [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "8b21b749-b872-43f7-a2c5-aefee6c5f3a1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.196036] env[62522]: DEBUG oslo_concurrency.lockutils [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "8b21b749-b872-43f7-a2c5-aefee6c5f3a1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.197939] env[62522]: INFO nova.compute.manager [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Terminating instance [ 935.210205] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2701c645-deb2-4ac2-937a-d79192f626e2 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.210205] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.104s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.210477] env[62522]: DEBUG nova.objects.instance [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lazy-loading 'resources' on Instance uuid fe1f5581-0dec-41e5-a450-c3de5a573602 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 935.367806] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415737, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.394133] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5211fee1-9ac0-bade-5ddd-a6e5694d84cf, 'name': SearchDatastore_Task, 'duration_secs': 0.013817} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.395172] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0355b7a4-e783-4250-9491-a1ea3d55324b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.403201] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 935.403201] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52cd9df2-1e6d-89f2-de47-5fd1505426b4" [ 935.403201] env[62522]: _type = "Task" [ 935.403201] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.414823] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52cd9df2-1e6d-89f2-de47-5fd1505426b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.549337] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Task: {'id': task-2415738, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.702396] env[62522]: DEBUG nova.compute.manager [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 935.702873] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 935.704110] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be399863-28a1-4369-a27c-e205eb50b458 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.720547] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 935.721194] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c6a55db-9c98-4537-9595-a60b101bafaf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.731477] env[62522]: DEBUG oslo_vmware.api [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 935.731477] env[62522]: value = "task-2415739" [ 935.731477] env[62522]: _type = "Task" [ 935.731477] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.744148] env[62522]: DEBUG oslo_vmware.api [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415739, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.744996] env[62522]: DEBUG nova.network.neutron [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Updating instance_info_cache with network_info: [{"id": "d830d64b-94fa-4bc8-a3e6-e45c4b0ae629", "address": "fa:16:3e:d4:80:4e", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.185", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd830d64b-94", "ovs_interfaceid": "d830d64b-94fa-4bc8-a3e6-e45c4b0ae629", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.871307] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415737, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.751884} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.872163] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] fcd0eef6-d059-4495-a982-058b6c9626d1/fcd0eef6-d059-4495-a982-058b6c9626d1.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 935.872163] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 935.873435] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-556f8342-c097-4aee-b1b2-beb4f6c5a0f0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.882156] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 935.882156] env[62522]: value = "task-2415740" [ 935.882156] env[62522]: _type = "Task" [ 935.882156] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.891952] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415740, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.918281] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52cd9df2-1e6d-89f2-de47-5fd1505426b4, 'name': SearchDatastore_Task, 'duration_secs': 0.056996} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.918281] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 935.918476] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a/72e054d2-79bb-4ef8-82d1-4e67ba0ef20a.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 935.918696] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cf1bc610-0f39-403c-843c-4dcf9eab3ae8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.931350] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 935.931350] env[62522]: value = "task-2415741" [ 935.931350] env[62522]: _type = "Task" [ 935.931350] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.940807] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415741, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.046138] env[62522]: DEBUG oslo_vmware.api [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Task: {'id': task-2415738, 'name': PowerOnVM_Task, 'duration_secs': 0.659641} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.048750] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 936.048967] env[62522]: INFO nova.compute.manager [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Took 8.60 seconds to spawn the instance on the hypervisor. 
[ 936.049166] env[62522]: DEBUG nova.compute.manager [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 936.050129] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-774d67f1-c67b-43d0-9e58-62d347c9258e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.240825] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-208b1a4d-f601-4e25-b857-53c97128450c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.245981] env[62522]: DEBUG oslo_vmware.api [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415739, 'name': PowerOffVM_Task, 'duration_secs': 0.452984} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.246648] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 936.246827] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 936.247082] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-98772b09-40c2-48de-9765-32684cefd39a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.250310] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Releasing lock "refresh_cache-74e52638-d284-4bd1-8cff-c7aca9426f75" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.255025] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6643bac1-a548-4328-b28e-398cc707911d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.286642] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2bf02b-b0fa-48c7-bc12-fb24a706b658 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.296441] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a1a3bd-1930-41d5-9e93-8282faa4b22a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.300472] env[62522]: DEBUG 
oslo_concurrency.lockutils [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Acquiring lock "ff6637e9-2a67-4302-9769-24ec045538d4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.300704] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lock "ff6637e9-2a67-4302-9769-24ec045538d4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.301052] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Acquiring lock "ff6637e9-2a67-4302-9769-24ec045538d4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.301195] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lock "ff6637e9-2a67-4302-9769-24ec045538d4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.301438] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lock "ff6637e9-2a67-4302-9769-24ec045538d4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.303942] env[62522]: INFO nova.compute.manager [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Terminating instance [ 936.314217] env[62522]: DEBUG nova.compute.provider_tree [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 936.326212] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 936.326212] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] 
[instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 936.326212] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Deleting the datastore file [datastore1] 8b21b749-b872-43f7-a2c5-aefee6c5f3a1 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 936.326212] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d5352293-ae76-43e3-a61d-7c952cc20f29 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.333450] env[62522]: DEBUG oslo_vmware.api [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for the task: (returnval){ [ 936.333450] env[62522]: value = "task-2415743" [ 936.333450] env[62522]: _type = "Task" [ 936.333450] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.344954] env[62522]: DEBUG oslo_vmware.api [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415743, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.397785] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415740, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071483} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.397905] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 936.398816] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a784e542-e4b5-4b06-9dcb-a6a3c0a27e3f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.433150] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] fcd0eef6-d059-4495-a982-058b6c9626d1/fcd0eef6-d059-4495-a982-058b6c9626d1.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 936.433438] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-524ecd55-fef6-444b-ad6f-a30f8fbe711f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.457736] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415741, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.459524] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 936.459524] env[62522]: value = "task-2415744" [ 936.459524] env[62522]: _type = "Task" [ 936.459524] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.469597] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415744, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.570160] env[62522]: INFO nova.compute.manager [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Took 50.10 seconds to build instance. 
[ 936.758050] env[62522]: DEBUG nova.compute.manager [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62522) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 936.758050] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.820969] env[62522]: DEBUG nova.scheduler.client.report [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 936.825129] env[62522]: DEBUG nova.compute.manager [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 936.825129] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 936.825533] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d3d1bd-a98c-4c8e-b8ea-95eb6ab304ea {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.835447] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 936.842891] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f161f6e-6b4a-40cf-b0ea-3e478a27c467 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.855612] env[62522]: DEBUG oslo_vmware.api [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Task: {'id': task-2415743, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.404268} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.857900] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 936.857900] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 936.858185] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 936.858479] env[62522]: INFO nova.compute.manager [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Took 1.16 seconds to destroy the instance on the hypervisor. [ 936.858859] env[62522]: DEBUG oslo.service.loopingcall [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 936.859363] env[62522]: DEBUG oslo_vmware.api [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 936.859363] env[62522]: value = "task-2415745" [ 936.859363] env[62522]: _type = "Task" [ 936.859363] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.859614] env[62522]: DEBUG nova.compute.manager [-] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 936.859718] env[62522]: DEBUG nova.network.neutron [-] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 936.876606] env[62522]: DEBUG oslo_vmware.api [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415745, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.947110] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415741, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.89809} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.948279] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a/72e054d2-79bb-4ef8-82d1-4e67ba0ef20a.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 936.948279] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 936.948279] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-08762502-cab8-4ccf-862f-ff3d88255c89 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.956497] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 936.956497] env[62522]: value = "task-2415746" [ 936.956497] env[62522]: _type = "Task" [ 936.956497] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.977410] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415746, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.980853] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415744, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.015955] env[62522]: DEBUG nova.compute.manager [req-968adfdf-39a4-4a98-8c1a-3897f141cfea req-0b10d05f-d173-433c-9679-a02bff585c81 service nova] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Received event network-changed-5ede0b01-ce31-4403-9ce2-41d300d0c750 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 937.016355] env[62522]: DEBUG nova.compute.manager [req-968adfdf-39a4-4a98-8c1a-3897f141cfea req-0b10d05f-d173-433c-9679-a02bff585c81 service nova] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Refreshing instance network info cache due to event network-changed-5ede0b01-ce31-4403-9ce2-41d300d0c750. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 937.016666] env[62522]: DEBUG oslo_concurrency.lockutils [req-968adfdf-39a4-4a98-8c1a-3897f141cfea req-0b10d05f-d173-433c-9679-a02bff585c81 service nova] Acquiring lock "refresh_cache-17ec01e7-9735-4771-a73c-c4c7634d59f1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.017012] env[62522]: DEBUG oslo_concurrency.lockutils [req-968adfdf-39a4-4a98-8c1a-3897f141cfea req-0b10d05f-d173-433c-9679-a02bff585c81 service nova] Acquired lock "refresh_cache-17ec01e7-9735-4771-a73c-c4c7634d59f1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.017245] env[62522]: DEBUG nova.network.neutron [req-968adfdf-39a4-4a98-8c1a-3897f141cfea req-0b10d05f-d173-433c-9679-a02bff585c81 service nova] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Refreshing network info cache for port 5ede0b01-ce31-4403-9ce2-41d300d0c750 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 937.077931] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea1f1ef8-940d-4eee-866b-b15c8b54d649 tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Lock "0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.150s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.326801] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.116s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.329153] env[62522]: DEBUG 
oslo_concurrency.lockutils [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.142s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.329153] env[62522]: DEBUG nova.objects.instance [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lazy-loading 'resources' on Instance uuid d30397b4-c617-4717-b624-ad1b06331bea {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 937.347220] env[62522]: DEBUG nova.network.neutron [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Successfully updated port: cac77d67-105b-49eb-9b57-8c60abb3165a {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 937.357669] env[62522]: INFO nova.scheduler.client.report [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Deleted allocations for instance fe1f5581-0dec-41e5-a450-c3de5a573602 [ 937.377535] env[62522]: DEBUG oslo_vmware.api [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415745, 'name': PowerOffVM_Task, 'duration_secs': 0.238708} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.377863] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 937.378119] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 937.378691] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-152c4535-b1c2-4f78-8840-2857cdd191a9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.454712] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 937.455162] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 937.455162] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Deleting the datastore file [datastore1] ff6637e9-2a67-4302-9769-24ec045538d4 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 937.456053] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df896c17-e7ba-4421-84c3-1990a2e6f652 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.466651] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415746, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09554} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.466993] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 937.473303] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03ddb425-82a7-4fd4-ba02-8a35df0e6f59 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.476360] env[62522]: DEBUG oslo_vmware.api [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 937.476360] env[62522]: value = "task-2415748" [ 937.476360] env[62522]: _type = "Task" [ 937.476360] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.501875] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a/72e054d2-79bb-4ef8-82d1-4e67ba0ef20a.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 937.502571] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415744, 'name': ReconfigVM_Task, 'duration_secs': 1.02319} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.503231] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41eab13f-3143-4ef8-875a-7bb348caa50c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.518238] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Reconfigured VM instance instance-00000045 to attach disk [datastore2] fcd0eef6-d059-4495-a982-058b6c9626d1/fcd0eef6-d059-4495-a982-058b6c9626d1.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 937.521958] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-39087f18-ae78-4ab4-9e7a-00ccdc5e336d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.525494] env[62522]: DEBUG oslo_vmware.api [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415748, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.532998] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 937.532998] env[62522]: value = "task-2415749" [ 937.532998] env[62522]: _type = "Task" [ 937.532998] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.535810] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 937.535810] env[62522]: value = "task-2415750" [ 937.535810] env[62522]: _type = "Task" [ 937.535810] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.548628] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415750, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.551938] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415749, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.568610] env[62522]: DEBUG nova.network.neutron [req-968adfdf-39a4-4a98-8c1a-3897f141cfea req-0b10d05f-d173-433c-9679-a02bff585c81 service nova] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 937.656420] env[62522]: DEBUG nova.network.neutron [req-968adfdf-39a4-4a98-8c1a-3897f141cfea req-0b10d05f-d173-433c-9679-a02bff585c81 service nova] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.759223] env[62522]: DEBUG nova.network.neutron [-] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.850084] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Acquiring lock "refresh_cache-17ec01e7-9735-4771-a73c-c4c7634d59f1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.868903] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dbaca0e5-5ebe-4019-b082-02701dc33d22 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "fe1f5581-0dec-41e5-a450-c3de5a573602" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.819s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.915492] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Acquiring lock "0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.915758] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Lock "0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.915965] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Acquiring lock "0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.916164] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Lock "0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.916335] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a93cef8-3523-470a-a539-4ffec97fabea 
tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Lock "0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.921406] env[62522]: INFO nova.compute.manager [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Terminating instance [ 937.991126] env[62522]: DEBUG oslo_vmware.api [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415748, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.342393} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.993660] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 937.994951] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 937.995056] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 937.996226] env[62522]: INFO nova.compute.manager [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Took 1.17 seconds to destroy the instance on the hypervisor. [ 937.996226] env[62522]: DEBUG oslo.service.loopingcall [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 937.996226] env[62522]: DEBUG nova.compute.manager [-] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 937.996226] env[62522]: DEBUG nova.network.neutron [-] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 938.051779] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415750, 'name': ReconfigVM_Task, 'duration_secs': 0.329268} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.054958] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Reconfigured VM instance instance-00000042 to attach disk [datastore2] 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a/72e054d2-79bb-4ef8-82d1-4e67ba0ef20a.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 938.055673] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415749, 'name': Rename_Task, 'duration_secs': 0.250884} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.058503] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7877a6e4-2edb-48bb-b9f0-00394280bab3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.060290] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 938.060914] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a7a91603-e27f-40e3-80d0-10f71ea8c2a2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.069598] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 938.069598] env[62522]: value = "task-2415751" [ 938.069598] env[62522]: _type = "Task" [ 938.069598] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.071257] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 938.071257] env[62522]: value = "task-2415752" [ 938.071257] env[62522]: _type = "Task" [ 938.071257] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.089934] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415752, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.090305] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415751, 'name': Rename_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.161121] env[62522]: DEBUG oslo_concurrency.lockutils [req-968adfdf-39a4-4a98-8c1a-3897f141cfea req-0b10d05f-d173-433c-9679-a02bff585c81 service nova] Releasing lock "refresh_cache-17ec01e7-9735-4771-a73c-c4c7634d59f1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.161546] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Acquired lock "refresh_cache-17ec01e7-9735-4771-a73c-c4c7634d59f1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.161711] env[62522]: DEBUG nova.network.neutron [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 938.261803] env[62522]: INFO nova.compute.manager [-] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Took 1.40 seconds to deallocate network for instance. 
[ 938.336074] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0541be38-38ea-4e89-9153-77b06a9423d3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.346393] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c392e04c-441d-4e28-a079-3eb663034c76 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.386864] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf6f806-0942-4f3c-986f-8fe991ba480e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.396663] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-938c48cf-caff-47b6-b91a-295a29b9c730 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.413400] env[62522]: DEBUG nova.compute.provider_tree [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 938.429405] env[62522]: DEBUG nova.compute.manager [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 938.429757] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 938.431242] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde510c6-3a8e-47b0-9869-578e9761d748 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.441504] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 938.441814] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4be728ea-1881-4d09-b9d7-c58294a7180a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.451574] env[62522]: DEBUG oslo_vmware.api [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Waiting for the task: (returnval){ [ 938.451574] env[62522]: value = "task-2415753" [ 938.451574] env[62522]: _type = "Task" [ 938.451574] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.461661] env[62522]: DEBUG oslo_vmware.api [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Task: {'id': task-2415753, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.585953] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415751, 'name': Rename_Task, 'duration_secs': 0.263794} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.589818] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 938.590214] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415752, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.590553] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c12a90dc-46af-4d6a-b9b5-1ba6b01def2e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.598470] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 938.598470] env[62522]: value = "task-2415754" [ 938.598470] env[62522]: _type = "Task" [ 938.598470] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.608513] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415754, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.715903] env[62522]: DEBUG nova.network.neutron [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 938.775130] env[62522]: DEBUG oslo_concurrency.lockutils [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.779903] env[62522]: DEBUG nova.network.neutron [-] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.916678] env[62522]: DEBUG nova.scheduler.client.report [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 938.965363] env[62522]: DEBUG oslo_vmware.api [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Task: {'id': task-2415753, 'name': PowerOffVM_Task, 'duration_secs': 0.196814} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.965655] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 938.965826] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 938.966094] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-37accb5a-c4e0-4dcd-aec5-17c5b0ad0fef {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.043414] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 939.043709] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 939.043897] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Deleting the datastore file [datastore2] 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 939.044222] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dd876b98-5746-47af-9b95-8779aad8b96d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.052306] env[62522]: DEBUG oslo_vmware.api [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Waiting for the task: (returnval){ [ 939.052306] env[62522]: value = "task-2415756" [ 939.052306] env[62522]: _type = "Task" [ 939.052306] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.055306] env[62522]: DEBUG nova.compute.manager [req-108bce6b-b823-4a82-9e7c-127c16a991e7 req-76eabcd8-5e6c-4e49-94d1-8f2c6eb4aa43 service nova] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Received event network-vif-plugged-cac77d67-105b-49eb-9b57-8c60abb3165a {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 939.055595] env[62522]: DEBUG oslo_concurrency.lockutils [req-108bce6b-b823-4a82-9e7c-127c16a991e7 req-76eabcd8-5e6c-4e49-94d1-8f2c6eb4aa43 service nova] Acquiring lock "17ec01e7-9735-4771-a73c-c4c7634d59f1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.055892] env[62522]: DEBUG oslo_concurrency.lockutils [req-108bce6b-b823-4a82-9e7c-127c16a991e7 req-76eabcd8-5e6c-4e49-94d1-8f2c6eb4aa43 service nova] Lock "17ec01e7-9735-4771-a73c-c4c7634d59f1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.056203] env[62522]: DEBUG oslo_concurrency.lockutils [req-108bce6b-b823-4a82-9e7c-127c16a991e7 req-76eabcd8-5e6c-4e49-94d1-8f2c6eb4aa43 service nova] Lock "17ec01e7-9735-4771-a73c-c4c7634d59f1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.056833] env[62522]: DEBUG nova.compute.manager [req-108bce6b-b823-4a82-9e7c-127c16a991e7 req-76eabcd8-5e6c-4e49-94d1-8f2c6eb4aa43 service nova] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] No waiting events found dispatching network-vif-plugged-cac77d67-105b-49eb-9b57-8c60abb3165a {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 939.056833] env[62522]: WARNING nova.compute.manager [req-108bce6b-b823-4a82-9e7c-127c16a991e7 req-76eabcd8-5e6c-4e49-94d1-8f2c6eb4aa43 service nova] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Received unexpected event network-vif-plugged-cac77d67-105b-49eb-9b57-8c60abb3165a for instance with vm_state building and task_state spawning. [ 939.057749] env[62522]: DEBUG nova.compute.manager [req-108bce6b-b823-4a82-9e7c-127c16a991e7 req-76eabcd8-5e6c-4e49-94d1-8f2c6eb4aa43 service nova] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Received event network-vif-deleted-808a9620-b31f-4e61-bb51-e2de4a3d3a7e {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 939.057749] env[62522]: DEBUG nova.compute.manager [req-108bce6b-b823-4a82-9e7c-127c16a991e7 req-76eabcd8-5e6c-4e49-94d1-8f2c6eb4aa43 service nova] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Received event network-changed-cac77d67-105b-49eb-9b57-8c60abb3165a {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 939.057749] env[62522]: DEBUG nova.compute.manager [req-108bce6b-b823-4a82-9e7c-127c16a991e7 req-76eabcd8-5e6c-4e49-94d1-8f2c6eb4aa43 service nova] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Refreshing instance network info cache due to event network-changed-cac77d67-105b-49eb-9b57-8c60abb3165a. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 939.057857] env[62522]: DEBUG oslo_concurrency.lockutils [req-108bce6b-b823-4a82-9e7c-127c16a991e7 req-76eabcd8-5e6c-4e49-94d1-8f2c6eb4aa43 service nova] Acquiring lock "refresh_cache-17ec01e7-9735-4771-a73c-c4c7634d59f1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.068355] env[62522]: DEBUG oslo_vmware.api [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Task: {'id': task-2415756, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.086651] env[62522]: DEBUG oslo_vmware.api [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2415752, 'name': PowerOnVM_Task, 'duration_secs': 0.675078} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.087655] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 939.087655] env[62522]: INFO nova.compute.manager [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Took 8.98 seconds to spawn the instance on the hypervisor. [ 939.087655] env[62522]: DEBUG nova.compute.manager [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 939.089689] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c86639-99db-4164-90fb-9f8b7fa5b50b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.112130] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415754, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.227246] env[62522]: DEBUG nova.network.neutron [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Updating instance_info_cache with network_info: [{"id": "5ede0b01-ce31-4403-9ce2-41d300d0c750", "address": "fa:16:3e:2f:e2:3e", "network": {"id": "4f2b672d-dd59-4fe8-a234-3af1afbb04fd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1951384653", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.126", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ca7e42d226a4ef6b48b882356da8950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2624812a-9f9c-461d-8b5f-79bea90c7ad3", "external-id": "nsx-vlan-transportzone-123", "segmentation_id": 123, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ede0b01-ce", "ovs_interfaceid": "5ede0b01-ce31-4403-9ce2-41d300d0c750", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "cac77d67-105b-49eb-9b57-8c60abb3165a", "address": "fa:16:3e:59:75:25", "network": {"id": "c92cc05a-9d00-4c01-baa7-9bdfe956f9c3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-407208332", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.131", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "3ca7e42d226a4ef6b48b882356da8950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcac77d67-10", "ovs_interfaceid": "cac77d67-105b-49eb-9b57-8c60abb3165a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.285675] env[62522]: INFO nova.compute.manager [-] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Took 1.29 seconds to deallocate network for instance. 
[ 939.423246] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.094s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.426038] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.212s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.427954] env[62522]: INFO nova.compute.claims [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 939.454054] env[62522]: INFO nova.scheduler.client.report [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Deleted allocations for instance d30397b4-c617-4717-b624-ad1b06331bea [ 939.567760] env[62522]: DEBUG oslo_vmware.api [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Task: {'id': task-2415756, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.397994} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.567760] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 939.567760] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 939.567760] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 939.567760] env[62522]: INFO nova.compute.manager [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 939.567760] env[62522]: DEBUG oslo.service.loopingcall [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 939.567760] env[62522]: DEBUG nova.compute.manager [-] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 939.567760] env[62522]: DEBUG nova.network.neutron [-] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 939.612917] env[62522]: INFO nova.compute.manager [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Took 50.75 seconds to build instance. [ 939.622021] env[62522]: DEBUG oslo_vmware.api [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415754, 'name': PowerOnVM_Task, 'duration_secs': 0.517509} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.622021] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 939.622021] env[62522]: DEBUG nova.compute.manager [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 939.622021] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e35d576-3c5d-4f78-8a1d-32c0e5bd8ed9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.733023] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Releasing lock "refresh_cache-17ec01e7-9735-4771-a73c-c4c7634d59f1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.733023] env[62522]: DEBUG nova.compute.manager [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Instance network_info: |[{"id": "5ede0b01-ce31-4403-9ce2-41d300d0c750", "address": "fa:16:3e:2f:e2:3e", "network": {"id": "4f2b672d-dd59-4fe8-a234-3af1afbb04fd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1951384653", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.126", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ca7e42d226a4ef6b48b882356da8950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2624812a-9f9c-461d-8b5f-79bea90c7ad3", "external-id": "nsx-vlan-transportzone-123", "segmentation_id": 123, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ede0b01-ce", "ovs_interfaceid": "5ede0b01-ce31-4403-9ce2-41d300d0c750", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "cac77d67-105b-49eb-9b57-8c60abb3165a", "address": "fa:16:3e:59:75:25", "network": {"id": "c92cc05a-9d00-4c01-baa7-9bdfe956f9c3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-407208332", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.131", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "3ca7e42d226a4ef6b48b882356da8950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcac77d67-10", "ovs_interfaceid": "cac77d67-105b-49eb-9b57-8c60abb3165a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 939.733023] env[62522]: DEBUG oslo_concurrency.lockutils [req-108bce6b-b823-4a82-9e7c-127c16a991e7 req-76eabcd8-5e6c-4e49-94d1-8f2c6eb4aa43 service nova] Acquired lock "refresh_cache-17ec01e7-9735-4771-a73c-c4c7634d59f1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.733023] env[62522]: DEBUG nova.network.neutron [req-108bce6b-b823-4a82-9e7c-127c16a991e7 req-76eabcd8-5e6c-4e49-94d1-8f2c6eb4aa43 service nova] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Refreshing network info cache for port cac77d67-105b-49eb-9b57-8c60abb3165a {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 939.733023] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:e2:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2624812a-9f9c-461d-8b5f-79bea90c7ad3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5ede0b01-ce31-4403-9ce2-41d300d0c750', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:75:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54c45719-5690-47bf-b45b-6cad9813071e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cac77d67-105b-49eb-9b57-8c60abb3165a', 
'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 939.742839] env[62522]: DEBUG oslo.service.loopingcall [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 939.749447] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 939.749447] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb6ef2b7-d33e-46dc-ba37-724b8bba9f62 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.781943] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 939.781943] env[62522]: value = "task-2415757" [ 939.781943] env[62522]: _type = "Task" [ 939.781943] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.793168] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.798409] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415757, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.965172] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ca6aa303-7150-432a-addc-44e5271a1e14 tempest-ListServerFiltersTestJSON-1824471531 tempest-ListServerFiltersTestJSON-1824471531-project-member] Lock "d30397b4-c617-4717-b624-ad1b06331bea" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.513s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.115428] env[62522]: DEBUG oslo_concurrency.lockutils [None req-85e1ea0e-3bee-4b19-8d85-c3b419888646 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "fcd0eef6-d059-4495-a982-058b6c9626d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.524s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.144265] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.167204] env[62522]: DEBUG nova.network.neutron [req-108bce6b-b823-4a82-9e7c-127c16a991e7 req-76eabcd8-5e6c-4e49-94d1-8f2c6eb4aa43 service nova] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Updated VIF entry in instance network info cache for port cac77d67-105b-49eb-9b57-8c60abb3165a. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 940.167204] env[62522]: DEBUG nova.network.neutron [req-108bce6b-b823-4a82-9e7c-127c16a991e7 req-76eabcd8-5e6c-4e49-94d1-8f2c6eb4aa43 service nova] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Updating instance_info_cache with network_info: [{"id": "5ede0b01-ce31-4403-9ce2-41d300d0c750", "address": "fa:16:3e:2f:e2:3e", "network": {"id": "4f2b672d-dd59-4fe8-a234-3af1afbb04fd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1951384653", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.126", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ca7e42d226a4ef6b48b882356da8950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2624812a-9f9c-461d-8b5f-79bea90c7ad3", "external-id": "nsx-vlan-transportzone-123", "segmentation_id": 123, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ede0b01-ce", "ovs_interfaceid": "5ede0b01-ce31-4403-9ce2-41d300d0c750", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "cac77d67-105b-49eb-9b57-8c60abb3165a", "address": "fa:16:3e:59:75:25", "network": {"id": "c92cc05a-9d00-4c01-baa7-9bdfe956f9c3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-407208332", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": 
"192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.131", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "3ca7e42d226a4ef6b48b882356da8950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcac77d67-10", "ovs_interfaceid": "cac77d67-105b-49eb-9b57-8c60abb3165a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.294403] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415757, 'name': CreateVM_Task, 'duration_secs': 0.452433} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.294587] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 940.295447] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.295616] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.295941] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 940.296216] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b54d01ae-e9d8-4060-90b8-a2a424e480a8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.303179] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Waiting for the task: (returnval){ [ 940.303179] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525a7b87-4c35-8866-4e0c-e0da06564f17" [ 940.303179] env[62522]: _type = "Task" [ 940.303179] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.312726] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525a7b87-4c35-8866-4e0c-e0da06564f17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.662901] env[62522]: DEBUG nova.network.neutron [-] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.671028] env[62522]: DEBUG oslo_concurrency.lockutils [req-108bce6b-b823-4a82-9e7c-127c16a991e7 req-76eabcd8-5e6c-4e49-94d1-8f2c6eb4aa43 service nova] Releasing lock "refresh_cache-17ec01e7-9735-4771-a73c-c4c7634d59f1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.671382] env[62522]: DEBUG nova.compute.manager [req-108bce6b-b823-4a82-9e7c-127c16a991e7 req-76eabcd8-5e6c-4e49-94d1-8f2c6eb4aa43 service nova] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Received event network-vif-deleted-b635a257-729f-4428-9bb4-d56e3bb92bf2 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 940.830928] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525a7b87-4c35-8866-4e0c-e0da06564f17, 'name': SearchDatastore_Task, 'duration_secs': 0.042803} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.831530] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.831863] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 940.832340] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.832572] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.832773] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 940.833296] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e4b11c8-51ef-498b-80a7-c07d9f6a8104 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.846217] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 940.846217] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 940.853480] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eda9d017-f467-44a0-acc4-b367d70cad17 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.858775] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Waiting for the task: (returnval){ [ 940.858775] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525e1eee-bf6e-2570-5c36-69ffac1c8176" [ 940.858775] env[62522]: _type = "Task" [ 940.858775] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.867449] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525e1eee-bf6e-2570-5c36-69ffac1c8176, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.969139] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ac7e07-7f29-416c-afa0-87d5823f02b8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.981425] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee1156e-a223-46b7-a62a-d4d872ac009f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.020401] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dbc9f29-8afd-41ac-98a5-9cfccc61bba4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.029021] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9bef769-0077-4775-9651-9cbca655d141 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.046600] env[62522]: DEBUG nova.compute.provider_tree [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.165710] env[62522]: INFO nova.compute.manager [-] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Took 1.60 seconds to deallocate network for instance. 
[ 941.316101] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquiring lock "72e054d2-79bb-4ef8-82d1-4e67ba0ef20a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.316101] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Lock "72e054d2-79bb-4ef8-82d1-4e67ba0ef20a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.316101] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquiring lock "72e054d2-79bb-4ef8-82d1-4e67ba0ef20a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.316101] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Lock "72e054d2-79bb-4ef8-82d1-4e67ba0ef20a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.316101] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Lock "72e054d2-79bb-4ef8-82d1-4e67ba0ef20a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.321020] env[62522]: INFO nova.compute.manager [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Terminating instance [ 941.337207] env[62522]: DEBUG nova.compute.manager [req-3012a156-3c62-42eb-8ead-8c3c1f484eea req-04df84f8-55d1-4393-8ecb-7f95c78268ca service nova] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Received event network-vif-deleted-50a9aa3e-b35e-4feb-b010-5e72dd8c2252 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 941.373302] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525e1eee-bf6e-2570-5c36-69ffac1c8176, 'name': SearchDatastore_Task, 'duration_secs': 0.034053} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.374599] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1f24753-2915-4c5a-930e-96e4cb8b3796 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.381678] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Waiting for the task: (returnval){ [ 941.381678] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5208b01b-bab2-d04f-e48b-a02bff471c2c" [ 941.381678] env[62522]: _type = "Task" [ 941.381678] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.391110] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5208b01b-bab2-d04f-e48b-a02bff471c2c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.550333] env[62522]: DEBUG nova.scheduler.client.report [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 941.674214] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.770332] env[62522]: DEBUG nova.compute.manager [req-f3a5d16c-5600-4020-acb8-593626546078 req-89254372-9ad8-461c-8a80-0db5e3481661 service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Received event network-changed-954fee91-36f2-497a-a856-6828a519a456 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 941.770891] env[62522]: DEBUG nova.compute.manager [req-f3a5d16c-5600-4020-acb8-593626546078 req-89254372-9ad8-461c-8a80-0db5e3481661 service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Refreshing instance network info cache due to event network-changed-954fee91-36f2-497a-a856-6828a519a456. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 941.771195] env[62522]: DEBUG oslo_concurrency.lockutils [req-f3a5d16c-5600-4020-acb8-593626546078 req-89254372-9ad8-461c-8a80-0db5e3481661 service nova] Acquiring lock "refresh_cache-fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.771420] env[62522]: DEBUG oslo_concurrency.lockutils [req-f3a5d16c-5600-4020-acb8-593626546078 req-89254372-9ad8-461c-8a80-0db5e3481661 service nova] Acquired lock "refresh_cache-fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.771767] env[62522]: DEBUG nova.network.neutron [req-f3a5d16c-5600-4020-acb8-593626546078 req-89254372-9ad8-461c-8a80-0db5e3481661 service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Refreshing network info cache for port 954fee91-36f2-497a-a856-6828a519a456 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 941.829939] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquiring lock "refresh_cache-72e054d2-79bb-4ef8-82d1-4e67ba0ef20a" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.834024] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquired lock "refresh_cache-72e054d2-79bb-4ef8-82d1-4e67ba0ef20a" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.834024] env[62522]: DEBUG nova.network.neutron [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 941.893359] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5208b01b-bab2-d04f-e48b-a02bff471c2c, 'name': SearchDatastore_Task, 'duration_secs': 0.012864} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.893711] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.894056] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 17ec01e7-9735-4771-a73c-c4c7634d59f1/17ec01e7-9735-4771-a73c-c4c7634d59f1.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 941.894392] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc704599-1705-448d-aea8-4d0f1503f8ee {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.902747] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Waiting for the task: (returnval){ [ 941.902747] env[62522]: value = "task-2415758" [ 941.902747] env[62522]: _type = "Task" [ 941.902747] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.913844] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415758, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.059066] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.633s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.060188] env[62522]: DEBUG nova.compute.manager [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 942.063923] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.834s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.069354] env[62522]: INFO nova.compute.claims [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 942.363234] env[62522]: DEBUG nova.network.neutron [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 942.418681] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415758, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.520915] env[62522]: DEBUG nova.network.neutron [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.566338] env[62522]: DEBUG nova.compute.utils [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 942.568275] env[62522]: DEBUG nova.compute.manager [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 942.568501] env[62522]: DEBUG nova.network.neutron [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 942.718547] env[62522]: DEBUG nova.policy [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '279803ee12e043bcac870602237a421b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fe395ee3cdf34e01a4c59bb81f581c82', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 942.917816] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415758, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.945349} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.918404] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 17ec01e7-9735-4771-a73c-c4c7634d59f1/17ec01e7-9735-4771-a73c-c4c7634d59f1.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 942.918643] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 942.919205] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a82465cd-1fba-4f14-ad18-420957cc1486 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.927439] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Waiting for the task: (returnval){ [ 942.927439] env[62522]: value = "task-2415759" [ 942.927439] env[62522]: _type = "Task" [ 942.927439] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.939425] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415759, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.941509] env[62522]: DEBUG nova.network.neutron [req-f3a5d16c-5600-4020-acb8-593626546078 req-89254372-9ad8-461c-8a80-0db5e3481661 service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Updated VIF entry in instance network info cache for port 954fee91-36f2-497a-a856-6828a519a456. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 942.942960] env[62522]: DEBUG nova.network.neutron [req-f3a5d16c-5600-4020-acb8-593626546078 req-89254372-9ad8-461c-8a80-0db5e3481661 service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Updating instance_info_cache with network_info: [{"id": "954fee91-36f2-497a-a856-6828a519a456", "address": "fa:16:3e:df:f4:48", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.136", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap954fee91-36", "ovs_interfaceid": "954fee91-36f2-497a-a856-6828a519a456", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.023974] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Releasing lock "refresh_cache-72e054d2-79bb-4ef8-82d1-4e67ba0ef20a" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.026099] env[62522]: DEBUG nova.compute.manager [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 943.026099] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 943.026510] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5242cd9-4f20-4951-910e-e4b5e94a9422 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.038591] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 943.038860] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-57fd46cc-154b-4fbd-9f15-e0c9687953d4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.048276] env[62522]: DEBUG oslo_vmware.api [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 943.048276] env[62522]: value = "task-2415760" [ 943.048276] env[62522]: _type = "Task" [ 943.048276] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.061428] env[62522]: DEBUG oslo_vmware.api [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415760, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.071962] env[62522]: DEBUG nova.compute.manager [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 943.385296] env[62522]: DEBUG nova.network.neutron [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Successfully created port: 7fd5b82e-a20b-4752-9751-44487429dc0c {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 943.446130] env[62522]: DEBUG oslo_concurrency.lockutils [req-f3a5d16c-5600-4020-acb8-593626546078 req-89254372-9ad8-461c-8a80-0db5e3481661 service nova] Releasing lock "refresh_cache-fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.446579] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415759, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.185895} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.446854] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 943.447760] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42eb978-20f2-42dd-9d68-93664799c647 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.482771] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 17ec01e7-9735-4771-a73c-c4c7634d59f1/17ec01e7-9735-4771-a73c-c4c7634d59f1.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 943.485820] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0833038e-639c-4e10-900c-64f0bfbecb7e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.512141] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Waiting for the task: (returnval){ [ 943.512141] env[62522]: value = "task-2415761" [ 943.512141] env[62522]: _type = "Task" [ 943.512141] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.521570] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415761, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.562495] env[62522]: DEBUG oslo_vmware.api [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415760, 'name': PowerOffVM_Task, 'duration_secs': 0.139209} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.562788] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 943.562947] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 943.563218] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b5071935-b84e-4660-8f36-f1c548a1fd0f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.605421] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 943.605543] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 943.605727] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Deleting the datastore file [datastore2] 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 943.605994] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-697ca372-6e60-4415-a671-001bb5517cc9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.617275] env[62522]: DEBUG oslo_vmware.api [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 943.617275] env[62522]: value = "task-2415763" [ 943.617275] env[62522]: _type = "Task" [ 943.617275] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.628606] env[62522]: DEBUG oslo_vmware.api [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415763, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.709022] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d8df434-7b2b-413b-9153-278d1397cbc0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.721028] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c5e27c-e46f-4adc-91e7-1a1cd499d6d9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.757086] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9487cab-bf2f-497e-a3be-15ff6c9f3726 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.769019] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b141223d-481d-416f-a43d-d6f630f3dc26 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.784857] env[62522]: DEBUG nova.compute.provider_tree [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 944.027602] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415761, 'name': ReconfigVM_Task, 'duration_secs': 0.327424} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.028379] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 17ec01e7-9735-4771-a73c-c4c7634d59f1/17ec01e7-9735-4771-a73c-c4c7634d59f1.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 944.029582] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-76f24f30-9de0-4e16-8d3a-cde2cd742d29 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.040017] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Waiting for the task: (returnval){ [ 944.040017] env[62522]: value = "task-2415764" [ 944.040017] env[62522]: _type = "Task" [ 944.040017] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.049821] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415764, 'name': Rename_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.089021] env[62522]: DEBUG nova.compute.manager [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 944.126790] env[62522]: DEBUG oslo_vmware.api [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415763, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.346547} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.129181] env[62522]: DEBUG nova.virt.hardware [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 944.129768] env[62522]: DEBUG nova.virt.hardware [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 944.129768] env[62522]: DEBUG nova.virt.hardware [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 944.129875] env[62522]: DEBUG nova.virt.hardware [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 944.129998] env[62522]: DEBUG nova.virt.hardware [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 944.130427] env[62522]: DEBUG nova.virt.hardware [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 944.131563] 
env[62522]: DEBUG nova.virt.hardware [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 944.133028] env[62522]: DEBUG nova.virt.hardware [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 944.133028] env[62522]: DEBUG nova.virt.hardware [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 944.133028] env[62522]: DEBUG nova.virt.hardware [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 944.133028] env[62522]: DEBUG nova.virt.hardware [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 944.133028] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 944.133028] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 944.133028] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 944.133300] env[62522]: INFO nova.compute.manager [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Took 1.11 seconds to destroy the instance on the hypervisor. [ 944.133503] env[62522]: DEBUG oslo.service.loopingcall [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 944.134305] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a9f0314-9c2f-44d9-a017-474c916ae714 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.137374] env[62522]: DEBUG nova.compute.manager [-] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 944.137515] env[62522]: DEBUG nova.network.neutron [-] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 944.147684] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db25fc5c-9dfc-44d1-9e92-a80db407bef9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.164643] env[62522]: DEBUG nova.network.neutron [-] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 944.289656] env[62522]: DEBUG nova.scheduler.client.report [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 944.551032] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415764, 'name': Rename_Task} progress is 99%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.667519] env[62522]: DEBUG nova.network.neutron [-] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.798425] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.734s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.799672] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 35.498s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.955808] env[62522]: DEBUG oslo_concurrency.lockutils [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "7f8a8270-5014-446c-aa42-ea0b4079e5a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.956047] env[62522]: DEBUG oslo_concurrency.lockutils [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "7f8a8270-5014-446c-aa42-ea0b4079e5a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.998707] env[62522]: DEBUG nova.compute.manager [req-70679940-bc0d-4f8d-97c9-ddf834834c52 req-58a5ce48-cb82-4367-8a06-76ccf3143d37 service nova] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Received event network-vif-plugged-7fd5b82e-a20b-4752-9751-44487429dc0c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 944.998938] env[62522]: DEBUG oslo_concurrency.lockutils [req-70679940-bc0d-4f8d-97c9-ddf834834c52 req-58a5ce48-cb82-4367-8a06-76ccf3143d37 service nova] Acquiring lock "8539afc0-1753-4c37-9fc9-25ec97b97243-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.999162] env[62522]: DEBUG oslo_concurrency.lockutils [req-70679940-bc0d-4f8d-97c9-ddf834834c52 req-58a5ce48-cb82-4367-8a06-76ccf3143d37 service nova] Lock "8539afc0-1753-4c37-9fc9-25ec97b97243-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.999333] env[62522]: DEBUG oslo_concurrency.lockutils [req-70679940-bc0d-4f8d-97c9-ddf834834c52 req-58a5ce48-cb82-4367-8a06-76ccf3143d37 service nova] Lock "8539afc0-1753-4c37-9fc9-25ec97b97243-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.999504] env[62522]: DEBUG nova.compute.manager [req-70679940-bc0d-4f8d-97c9-ddf834834c52 req-58a5ce48-cb82-4367-8a06-76ccf3143d37 service nova] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] No waiting events found dispatching network-vif-plugged-7fd5b82e-a20b-4752-9751-44487429dc0c {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 944.999669] env[62522]: WARNING nova.compute.manager [req-70679940-bc0d-4f8d-97c9-ddf834834c52 req-58a5ce48-cb82-4367-8a06-76ccf3143d37 service nova] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Received unexpected event network-vif-plugged-7fd5b82e-a20b-4752-9751-44487429dc0c for instance with vm_state building and task_state spawning. [ 945.051508] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415764, 'name': Rename_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.173563] env[62522]: INFO nova.compute.manager [-] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Took 1.03 seconds to deallocate network for instance. [ 945.184211] env[62522]: DEBUG nova.network.neutron [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Successfully updated port: 7fd5b82e-a20b-4752-9751-44487429dc0c {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 945.316233] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Acquiring lock "4a865aa1-6b0e-43b6-96ff-cd2209ab01ee" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.316487] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Lock "4a865aa1-6b0e-43b6-96ff-cd2209ab01ee" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.458659] env[62522]: DEBUG nova.compute.manager [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 945.557127] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415764, 'name': Rename_Task, 'duration_secs': 1.180234} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.557353] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 945.557621] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9cfda289-4fa1-41ae-a6dd-07233d199219 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.580105] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Waiting for the task: (returnval){ [ 945.580105] env[62522]: value = "task-2415765" [ 945.580105] env[62522]: _type = "Task" [ 945.580105] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.589746] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415765, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.678500] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.687463] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Acquiring lock "refresh_cache-8539afc0-1753-4c37-9fc9-25ec97b97243" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.687633] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Acquired lock "refresh_cache-8539afc0-1753-4c37-9fc9-25ec97b97243" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.687818] env[62522]: DEBUG nova.network.neutron [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 945.819987] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Applying migration context for instance 74e52638-d284-4bd1-8cff-c7aca9426f75 as it has an incoming, in-progress migration 8cef14b7-9f7c-4125-955c-e7a909c91b4f. 
Migration status is reverting {{(pid=62522) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 945.822517] env[62522]: INFO nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Updating resource usage from migration 8cef14b7-9f7c-4125-955c-e7a909c91b4f [ 945.826352] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Lock "4a865aa1-6b0e-43b6-96ff-cd2209ab01ee" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.509s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.826588] env[62522]: DEBUG nova.compute.manager [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 945.854686] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance c181ce48-9fe2-4400-9047-f8b5a7159dd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.854686] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance cd69a052-369b-4809-baf0-a1aec44f4ab5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.854686] env[62522]: WARNING nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance c1fd078c-61d4-4c0f-8c49-0f56a926a087 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 945.854686] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance e813e7da-fd2c-4f10-b2f3-1e2b5c153a19 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.854686] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance ebca687d-4de7-4fd6-99fb-b4f0154abe9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.854686] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 879354d3-7423-41e2-93f6-0d8d3a120170 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.854686] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 3c4c395c-0625-4569-990d-e2d4ad162c14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.854686] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance bf44e269-0297-473e-b6ce-04a40d0ec1b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.854686] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance a10c4dee-4490-445a-bea2-9f8ef5425d15 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.854686] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 6ef27aee-719c-4089-825d-fc117e867bde actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.854686] env[62522]: WARNING nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance ff6637e9-2a67-4302-9769-24ec045538d4 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 945.855194] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 9141ffdd-cbfa-4efe-a01b-dc1326af474c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.855194] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance c28d2907-5b59-4df8-91a8-4ba0f2047d89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.855194] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.855194] env[62522]: WARNING nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 945.855326] env[62522]: WARNING nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 043a0a1b-268c-4caa-b1f7-cc7d70c3b314 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 945.855434] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 04a9d357-d094-487b-8f09-2f7e0c35f0d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.855556] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.855677] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.855811] env[62522]: WARNING nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 8b21b749-b872-43f7-a2c5-aefee6c5f3a1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 945.855942] env[62522]: WARNING nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 945.856119] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance fcd0eef6-d059-4495-a982-058b6c9626d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.856271] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Migration 8cef14b7-9f7c-4125-955c-e7a909c91b4f is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 945.856371] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 74e52638-d284-4bd1-8cff-c7aca9426f75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.856488] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 17ec01e7-9735-4771-a73c-c4c7634d59f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.856600] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 8539afc0-1753-4c37-9fc9-25ec97b97243 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.856710] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance e60d5286-04dd-42bb-ae50-26b0a763d2bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.980607] env[62522]: DEBUG oslo_concurrency.lockutils [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.091264] env[62522]: DEBUG oslo_vmware.api [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415765, 'name': PowerOnVM_Task, 'duration_secs': 0.472577} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.091543] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 946.091746] env[62522]: INFO nova.compute.manager [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Took 12.08 seconds to spawn the instance on the hypervisor. [ 946.091924] env[62522]: DEBUG nova.compute.manager [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 946.092764] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-747873c4-dc42-44c9-96e2-cc4ec405ef2a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.262755] env[62522]: DEBUG nova.network.neutron [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 946.330950] env[62522]: DEBUG nova.compute.utils [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 946.332473] env[62522]: DEBUG nova.compute.manager [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 946.332680] env[62522]: DEBUG nova.network.neutron [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 946.359549] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 917469c5-20be-4814-814f-a042415be021 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 946.443020] env[62522]: DEBUG nova.policy [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '694574f11ad8413abe80a91cf32d4e31', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ff7c42045a4b4e52b61b54ed0bbcf5f5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 946.525472] env[62522]: DEBUG nova.network.neutron [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Updating instance_info_cache with network_info: [{"id": "7fd5b82e-a20b-4752-9751-44487429dc0c", "address": "fa:16:3e:d4:fd:35", "network": {"id": "2c41dadc-c6bf-4448-a61d-37d0b25e9bca", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1028483532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fe395ee3cdf34e01a4c59bb81f581c82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fd5b82e-a2", "ovs_interfaceid": "7fd5b82e-a20b-4752-9751-44487429dc0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.615468] env[62522]: INFO nova.compute.manager [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Took 54.93 seconds to build instance. [ 946.820195] env[62522]: DEBUG nova.network.neutron [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Successfully created port: 68be6786-9e14-4d60-800f-6744b965ccd0 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 946.839294] env[62522]: DEBUG nova.compute.manager [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 946.865016] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 947.029095] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Releasing lock "refresh_cache-8539afc0-1753-4c37-9fc9-25ec97b97243" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.029430] env[62522]: DEBUG nova.compute.manager [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Instance network_info: |[{"id": "7fd5b82e-a20b-4752-9751-44487429dc0c", "address": "fa:16:3e:d4:fd:35", "network": {"id": "2c41dadc-c6bf-4448-a61d-37d0b25e9bca", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1028483532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fe395ee3cdf34e01a4c59bb81f581c82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fd5b82e-a2", "ovs_interfaceid": "7fd5b82e-a20b-4752-9751-44487429dc0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 947.029978] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:fd:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3cc0a33d-17c0-4b87-b48f-413a87a4cc6a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7fd5b82e-a20b-4752-9751-44487429dc0c', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 947.038657] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Creating folder: Project (fe395ee3cdf34e01a4c59bb81f581c82). Parent ref: group-v489562. 
{{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 947.039011] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ad582244-fb41-419b-a749-f2b40082a848 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.052509] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Created folder: Project (fe395ee3cdf34e01a4c59bb81f581c82) in parent group-v489562. [ 947.052785] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Creating folder: Instances. Parent ref: group-v489757. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 947.052988] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a6bfc5ce-f911-4b07-b0a3-6d4568ecf600 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.067059] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Created folder: Instances in parent group-v489757. [ 947.067405] env[62522]: DEBUG oslo.service.loopingcall [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 947.067636] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 947.067857] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-af7c6396-f743-49c5-82c6-3f7e0a6b4378 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.089437] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 947.089437] env[62522]: value = "task-2415768" [ 947.089437] env[62522]: _type = "Task" [ 947.089437] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.097986] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415768, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.121601] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a47a1c91-7eaf-45ac-8915-ff1fa3263aad tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Lock "17ec01e7-9735-4771-a73c-c4c7634d59f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.019s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.248050] env[62522]: DEBUG nova.compute.manager [req-4935f222-d9b7-4386-8249-965ce3a6b611 req-e604c66d-a57a-459b-a4cb-cec6bb84dc8b service nova] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Received event network-changed-7fd5b82e-a20b-4752-9751-44487429dc0c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 947.248050] env[62522]: DEBUG nova.compute.manager [req-4935f222-d9b7-4386-8249-965ce3a6b611 req-e604c66d-a57a-459b-a4cb-cec6bb84dc8b service nova] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Refreshing instance network info cache due to event network-changed-7fd5b82e-a20b-4752-9751-44487429dc0c. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 947.248453] env[62522]: DEBUG oslo_concurrency.lockutils [req-4935f222-d9b7-4386-8249-965ce3a6b611 req-e604c66d-a57a-459b-a4cb-cec6bb84dc8b service nova] Acquiring lock "refresh_cache-8539afc0-1753-4c37-9fc9-25ec97b97243" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.248453] env[62522]: DEBUG oslo_concurrency.lockutils [req-4935f222-d9b7-4386-8249-965ce3a6b611 req-e604c66d-a57a-459b-a4cb-cec6bb84dc8b service nova] Acquired lock "refresh_cache-8539afc0-1753-4c37-9fc9-25ec97b97243" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.248580] env[62522]: DEBUG nova.network.neutron [req-4935f222-d9b7-4386-8249-965ce3a6b611 req-e604c66d-a57a-459b-a4cb-cec6bb84dc8b service nova] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Refreshing network info cache for port 7fd5b82e-a20b-4752-9751-44487429dc0c {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 947.370984] env[62522]: DEBUG oslo_concurrency.lockutils [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Acquiring lock "17ec01e7-9735-4771-a73c-c4c7634d59f1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.371279] env[62522]: DEBUG oslo_concurrency.lockutils [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Lock "17ec01e7-9735-4771-a73c-c4c7634d59f1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.371411] env[62522]: DEBUG oslo_concurrency.lockutils [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Acquiring lock "17ec01e7-9735-4771-a73c-c4c7634d59f1-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.372036] env[62522]: DEBUG oslo_concurrency.lockutils [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Lock "17ec01e7-9735-4771-a73c-c4c7634d59f1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.372036] env[62522]: DEBUG oslo_concurrency.lockutils [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Lock "17ec01e7-9735-4771-a73c-c4c7634d59f1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.374802] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 7e5fc552-748f-4569-bd61-c81a52bb46b0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 947.377205] env[62522]: INFO nova.compute.manager [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Terminating instance [ 947.600268] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415768, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.848956] env[62522]: DEBUG nova.compute.manager [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 947.873881] env[62522]: DEBUG nova.virt.hardware [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 947.874143] env[62522]: DEBUG nova.virt.hardware [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 947.874307] env[62522]: DEBUG nova.virt.hardware [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 947.874490] env[62522]: DEBUG nova.virt.hardware [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 947.874639] env[62522]: DEBUG nova.virt.hardware [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 947.874782] env[62522]: DEBUG nova.virt.hardware [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 947.874987] env[62522]: DEBUG nova.virt.hardware [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 947.875166] env[62522]: DEBUG nova.virt.hardware [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 947.875338] env[62522]: DEBUG nova.virt.hardware [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 
tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 947.875505] env[62522]: DEBUG nova.virt.hardware [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 947.875678] env[62522]: DEBUG nova.virt.hardware [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 947.876530] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91071297-3424-4aa2-9840-72f4735eb0bc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.879829] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance e1225c6f-9025-41ff-94fa-a55af49aeed2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 947.881289] env[62522]: DEBUG nova.compute.manager [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 947.881511] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 947.882803] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98cc923a-78f6-4c0a-8674-31c90fd60060 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.888495] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e25b12eb-c64d-4e06-9aa7-66fc0bd046ae {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.897138] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 947.897874] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bc597a94-7bfd-408a-b557-c94a1cff0266 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.910494] env[62522]: DEBUG oslo_vmware.api [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Waiting for the task: (returnval){ [ 947.910494] env[62522]: value = "task-2415769" [ 947.910494] env[62522]: _type = "Task" [ 947.910494] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.918482] env[62522]: DEBUG oslo_vmware.api [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415769, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.971031] env[62522]: DEBUG nova.network.neutron [req-4935f222-d9b7-4386-8249-965ce3a6b611 req-e604c66d-a57a-459b-a4cb-cec6bb84dc8b service nova] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Updated VIF entry in instance network info cache for port 7fd5b82e-a20b-4752-9751-44487429dc0c. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 947.971442] env[62522]: DEBUG nova.network.neutron [req-4935f222-d9b7-4386-8249-965ce3a6b611 req-e604c66d-a57a-459b-a4cb-cec6bb84dc8b service nova] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Updating instance_info_cache with network_info: [{"id": "7fd5b82e-a20b-4752-9751-44487429dc0c", "address": "fa:16:3e:d4:fd:35", "network": {"id": "2c41dadc-c6bf-4448-a61d-37d0b25e9bca", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1028483532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fe395ee3cdf34e01a4c59bb81f581c82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fd5b82e-a2", "ovs_interfaceid": "7fd5b82e-a20b-4752-9751-44487429dc0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.101072] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415768, 'name': CreateVM_Task, 'duration_secs': 0.589915} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.101251] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 948.102029] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.102173] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.102542] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 948.102838] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab918027-dd02-407d-8ce4-90536f88adb5 {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.108107] env[62522]: DEBUG oslo_vmware.api [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Waiting for the task: (returnval){ [ 948.108107] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e4d624-f8d0-ffdd-1d51-0fb430e16bfa" [ 948.108107] env[62522]: _type = "Task" [ 948.108107] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.116875] env[62522]: DEBUG oslo_vmware.api [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e4d624-f8d0-ffdd-1d51-0fb430e16bfa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.386023] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 7f8a8270-5014-446c-aa42-ea0b4079e5a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 948.386023] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Total usable vcpus: 48, total allocated vcpus: 21 {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 948.386023] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4672MB phys_disk=200GB used_disk=21GB total_vcpus=48 used_vcpus=21 pci_stats=[] {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 948.400180] env[62522]: DEBUG nova.network.neutron [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Successfully updated port: 68be6786-9e14-4d60-800f-6744b965ccd0 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 948.425332] env[62522]: DEBUG oslo_vmware.api [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415769, 'name': PowerOffVM_Task, 'duration_secs': 0.301327} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.425635] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 948.425822] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 948.426374] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ccd768c2-aa7a-4442-815c-ad22b7b2541b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.474720] env[62522]: DEBUG oslo_concurrency.lockutils [req-4935f222-d9b7-4386-8249-965ce3a6b611 req-e604c66d-a57a-459b-a4cb-cec6bb84dc8b service nova] Releasing lock "refresh_cache-8539afc0-1753-4c37-9fc9-25ec97b97243" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.579203] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 948.579393] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 948.579590] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Deleting the datastore file [datastore2] 17ec01e7-9735-4771-a73c-c4c7634d59f1 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 948.579848] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b6822823-b94a-448f-9476-76959cb45bd0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.587005] env[62522]: DEBUG oslo_vmware.api [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Waiting for the task: (returnval){ [ 948.587005] env[62522]: value = "task-2415771" [ 948.587005] env[62522]: _type = "Task" [ 948.587005] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.595605] env[62522]: DEBUG oslo_vmware.api [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415771, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.618569] env[62522]: DEBUG oslo_vmware.api [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e4d624-f8d0-ffdd-1d51-0fb430e16bfa, 'name': SearchDatastore_Task, 'duration_secs': 0.015177} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.621026] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.621282] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 948.621512] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.621660] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.621837] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 948.622302] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-815ae422-3726-44c0-beb4-6804aaeb38cc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.631067] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 948.631247] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 948.634079] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55fd27c8-7e3f-4156-9282-25168676a9fe {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.639485] env[62522]: DEBUG oslo_vmware.api [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Waiting for the task: (returnval){ [ 948.639485] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5233f661-d23e-999f-b353-7e3c341d50c3" [ 948.639485] env[62522]: _type = "Task" [ 948.639485] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.647320] env[62522]: DEBUG oslo_vmware.api [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5233f661-d23e-999f-b353-7e3c341d50c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.790182] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bbfee77-b1ae-415f-8b33-793df4f07ecd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.797975] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09069a2c-e258-4c2b-9df7-100d80330aec {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.829760] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2275a45d-3978-4c57-be75-0dc284c7ab37 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.837683] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f75dd3a-f1fd-4800-8bb8-7753d1e0f57b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.851898] env[62522]: DEBUG nova.compute.provider_tree [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 948.903098] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Acquiring lock "refresh_cache-e60d5286-04dd-42bb-ae50-26b0a763d2bc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.903256] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Acquired lock "refresh_cache-e60d5286-04dd-42bb-ae50-26b0a763d2bc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.903469] env[62522]: DEBUG nova.network.neutron [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 
tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 949.097026] env[62522]: DEBUG oslo_vmware.api [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Task: {'id': task-2415771, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143737} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.097026] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 949.097186] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 949.097372] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 949.097549] env[62522]: INFO nova.compute.manager [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Took 1.22 seconds to destroy the instance on the hypervisor. [ 949.098171] env[62522]: DEBUG oslo.service.loopingcall [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 949.098171] env[62522]: DEBUG nova.compute.manager [-] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 949.098171] env[62522]: DEBUG nova.network.neutron [-] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 949.151374] env[62522]: DEBUG oslo_vmware.api [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5233f661-d23e-999f-b353-7e3c341d50c3, 'name': SearchDatastore_Task, 'duration_secs': 0.009337} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.151374] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99baec32-c8bb-4754-8f64-59c0439ae04d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.157056] env[62522]: DEBUG oslo_vmware.api [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Waiting for the task: (returnval){ [ 949.157056] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529d421a-f78e-eae8-385f-813af4f8bacf" [ 949.157056] env[62522]: _type = "Task" [ 949.157056] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.165551] env[62522]: DEBUG oslo_vmware.api [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529d421a-f78e-eae8-385f-813af4f8bacf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.292885] env[62522]: DEBUG nova.compute.manager [req-5928bd71-cda6-4654-ab16-a7deaa5137e5 req-94a55d56-e941-4e63-8ac5-cbdfe603df34 service nova] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Received event network-vif-plugged-68be6786-9e14-4d60-800f-6744b965ccd0 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 949.293130] env[62522]: DEBUG oslo_concurrency.lockutils [req-5928bd71-cda6-4654-ab16-a7deaa5137e5 req-94a55d56-e941-4e63-8ac5-cbdfe603df34 service nova] Acquiring lock "e60d5286-04dd-42bb-ae50-26b0a763d2bc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.293348] env[62522]: DEBUG oslo_concurrency.lockutils [req-5928bd71-cda6-4654-ab16-a7deaa5137e5 req-94a55d56-e941-4e63-8ac5-cbdfe603df34 service nova] Lock "e60d5286-04dd-42bb-ae50-26b0a763d2bc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.293524] env[62522]: DEBUG oslo_concurrency.lockutils [req-5928bd71-cda6-4654-ab16-a7deaa5137e5 req-94a55d56-e941-4e63-8ac5-cbdfe603df34 service nova] Lock "e60d5286-04dd-42bb-ae50-26b0a763d2bc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.293691] env[62522]: DEBUG nova.compute.manager [req-5928bd71-cda6-4654-ab16-a7deaa5137e5 req-94a55d56-e941-4e63-8ac5-cbdfe603df34 service nova] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] No waiting events found dispatching network-vif-plugged-68be6786-9e14-4d60-800f-6744b965ccd0 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 949.294166] env[62522]: WARNING nova.compute.manager [req-5928bd71-cda6-4654-ab16-a7deaa5137e5 req-94a55d56-e941-4e63-8ac5-cbdfe603df34 service nova] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Received unexpected event 
network-vif-plugged-68be6786-9e14-4d60-800f-6744b965ccd0 for instance with vm_state building and task_state spawning. [ 949.294399] env[62522]: DEBUG nova.compute.manager [req-5928bd71-cda6-4654-ab16-a7deaa5137e5 req-94a55d56-e941-4e63-8ac5-cbdfe603df34 service nova] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Received event network-changed-68be6786-9e14-4d60-800f-6744b965ccd0 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 949.294575] env[62522]: DEBUG nova.compute.manager [req-5928bd71-cda6-4654-ab16-a7deaa5137e5 req-94a55d56-e941-4e63-8ac5-cbdfe603df34 service nova] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Refreshing instance network info cache due to event network-changed-68be6786-9e14-4d60-800f-6744b965ccd0. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 949.294801] env[62522]: DEBUG oslo_concurrency.lockutils [req-5928bd71-cda6-4654-ab16-a7deaa5137e5 req-94a55d56-e941-4e63-8ac5-cbdfe603df34 service nova] Acquiring lock "refresh_cache-e60d5286-04dd-42bb-ae50-26b0a763d2bc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.356972] env[62522]: DEBUG nova.scheduler.client.report [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 949.456823] env[62522]: DEBUG nova.network.neutron [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 949.669724] env[62522]: DEBUG oslo_vmware.api [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529d421a-f78e-eae8-385f-813af4f8bacf, 'name': SearchDatastore_Task, 'duration_secs': 0.010421} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.670585] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.670585] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 8539afc0-1753-4c37-9fc9-25ec97b97243/8539afc0-1753-4c37-9fc9-25ec97b97243.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 949.670585] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b0e2058a-2265-4e97-a45b-caff2b4f1fe4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.677525] env[62522]: DEBUG oslo_vmware.api [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Waiting for the task: (returnval){ [ 949.677525] env[62522]: value = "task-2415772" [ 949.677525] env[62522]: _type = "Task" [ 949.677525] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.686244] env[62522]: DEBUG oslo_vmware.api [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Task: {'id': task-2415772, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.691416] env[62522]: DEBUG nova.network.neutron [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Updating instance_info_cache with network_info: [{"id": "68be6786-9e14-4d60-800f-6744b965ccd0", "address": "fa:16:3e:44:ef:b3", "network": {"id": "66a0ba7b-811b-4a05-b393-268b3dd5e5cc", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-182496051-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff7c42045a4b4e52b61b54ed0bbcf5f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "535b175f-71d3-4226-81ab-ca253f27fedd", "external-id": "nsx-vlan-transportzone-155", "segmentation_id": 155, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68be6786-9e", "ovs_interfaceid": "68be6786-9e14-4d60-800f-6744b965ccd0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.859256] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62522) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 949.859560] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.060s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.859841] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.264s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.860000] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.862643] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.776s {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.864436] env[62522]: INFO nova.compute.claims [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 949.867614] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 949.867835] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Cleaning up deleted instances {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11745}} [ 949.897229] env[62522]: INFO nova.scheduler.client.report [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Deleted allocations for instance c1fd078c-61d4-4c0f-8c49-0f56a926a087 [ 950.187650] env[62522]: DEBUG oslo_vmware.api [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Task: {'id': task-2415772, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.4947} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.187923] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 8539afc0-1753-4c37-9fc9-25ec97b97243/8539afc0-1753-4c37-9fc9-25ec97b97243.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 950.188159] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 950.188424] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e71f92d4-bcc1-485b-a887-aaa38d84077f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.194923] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Releasing lock "refresh_cache-e60d5286-04dd-42bb-ae50-26b0a763d2bc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.195243] env[62522]: DEBUG nova.compute.manager [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Instance network_info: |[{"id": "68be6786-9e14-4d60-800f-6744b965ccd0", "address": "fa:16:3e:44:ef:b3", "network": {"id": 
"66a0ba7b-811b-4a05-b393-268b3dd5e5cc", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-182496051-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff7c42045a4b4e52b61b54ed0bbcf5f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "535b175f-71d3-4226-81ab-ca253f27fedd", "external-id": "nsx-vlan-transportzone-155", "segmentation_id": 155, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68be6786-9e", "ovs_interfaceid": "68be6786-9e14-4d60-800f-6744b965ccd0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 950.195485] env[62522]: DEBUG nova.network.neutron [-] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.196700] env[62522]: DEBUG oslo_vmware.api [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Waiting for the task: (returnval){ [ 950.196700] env[62522]: value = "task-2415773" [ 950.196700] env[62522]: _type = "Task" [ 950.196700] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.197137] env[62522]: DEBUG oslo_concurrency.lockutils [req-5928bd71-cda6-4654-ab16-a7deaa5137e5 req-94a55d56-e941-4e63-8ac5-cbdfe603df34 service nova] Acquired lock "refresh_cache-e60d5286-04dd-42bb-ae50-26b0a763d2bc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.197328] env[62522]: DEBUG nova.network.neutron [req-5928bd71-cda6-4654-ab16-a7deaa5137e5 req-94a55d56-e941-4e63-8ac5-cbdfe603df34 service nova] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Refreshing network info cache for port 68be6786-9e14-4d60-800f-6744b965ccd0 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 950.198358] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:ef:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '535b175f-71d3-4226-81ab-ca253f27fedd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '68be6786-9e14-4d60-800f-6744b965ccd0', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 950.206033] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Creating folder: Project (ff7c42045a4b4e52b61b54ed0bbcf5f5). Parent ref: group-v489562. 
{{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 950.206727] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ae9f8a35-2da5-40b8-a621-a170af60cdac {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.217700] env[62522]: DEBUG oslo_vmware.api [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Task: {'id': task-2415773, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.222132] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Created folder: Project (ff7c42045a4b4e52b61b54ed0bbcf5f5) in parent group-v489562. [ 950.222335] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Creating folder: Instances. Parent ref: group-v489760. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 950.222581] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47283650-5a72-47cb-95f2-96b451f3e957 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.231667] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Created folder: Instances in parent group-v489760. [ 950.231914] env[62522]: DEBUG oslo.service.loopingcall [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 950.232170] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 950.232395] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-54564986-01da-4441-98aa-336a0d36c536 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.252682] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 950.252682] env[62522]: value = "task-2415776" [ 950.252682] env[62522]: _type = "Task" [ 950.252682] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.260805] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415776, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.383751] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] There are 45 instances to clean {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11754}} [ 950.384036] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: fe1f5581-0dec-41e5-a450-c3de5a573602] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 950.405777] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bfe7ee29-c280-4eda-86b0-bbf7d3ea59cb tempest-FloatingIPsAssociationTestJSON-237205886 tempest-FloatingIPsAssociationTestJSON-237205886-project-member] Lock "c1fd078c-61d4-4c0f-8c49-0f56a926a087" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.546s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.698074] env[62522]: INFO nova.compute.manager [-] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Took 1.60 seconds to deallocate network for instance. [ 950.720726] env[62522]: DEBUG oslo_vmware.api [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Task: {'id': task-2415773, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066128} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.720726] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 950.722119] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27394ec6-1f99-437c-ac8f-cdfc98ffda23 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.747665] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 8539afc0-1753-4c37-9fc9-25ec97b97243/8539afc0-1753-4c37-9fc9-25ec97b97243.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 950.748473] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-812d6c77-4f7e-465d-9c2b-4fe778b11462 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.774131] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415776, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.775578] env[62522]: DEBUG oslo_vmware.api [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Waiting for the task: (returnval){ [ 950.775578] env[62522]: value = "task-2415777" [ 950.775578] env[62522]: _type = "Task" [ 950.775578] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.784375] env[62522]: DEBUG oslo_vmware.api [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Task: {'id': task-2415777, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.887215] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: ed7220fa-fee9-4715-acbb-236682c6729e] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 951.048080] env[62522]: DEBUG nova.network.neutron [req-5928bd71-cda6-4654-ab16-a7deaa5137e5 req-94a55d56-e941-4e63-8ac5-cbdfe603df34 service nova] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Updated VIF entry in instance network info cache for port 68be6786-9e14-4d60-800f-6744b965ccd0. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 951.048464] env[62522]: DEBUG nova.network.neutron [req-5928bd71-cda6-4654-ab16-a7deaa5137e5 req-94a55d56-e941-4e63-8ac5-cbdfe603df34 service nova] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Updating instance_info_cache with network_info: [{"id": "68be6786-9e14-4d60-800f-6744b965ccd0", "address": "fa:16:3e:44:ef:b3", "network": {"id": "66a0ba7b-811b-4a05-b393-268b3dd5e5cc", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-182496051-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff7c42045a4b4e52b61b54ed0bbcf5f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "535b175f-71d3-4226-81ab-ca253f27fedd", "external-id": "nsx-vlan-transportzone-155", "segmentation_id": 155, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68be6786-9e", "ovs_interfaceid": "68be6786-9e14-4d60-800f-6744b965ccd0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.205971] env[62522]: DEBUG oslo_concurrency.lockutils [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.293283] env[62522]: DEBUG 
oslo_vmware.api [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Task: {'id': task-2415777, 'name': ReconfigVM_Task, 'duration_secs': 0.347457} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.293490] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415776, 'name': CreateVM_Task, 'duration_secs': 0.666322} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.293738] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 8539afc0-1753-4c37-9fc9-25ec97b97243/8539afc0-1753-4c37-9fc9-25ec97b97243.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 951.294311] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 951.294530] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-781fd934-a92b-404b-b425-e5c5d408d4e5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.296463] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.296629] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.296932] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 951.297182] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38d2f130-45d1-4f8b-a71b-575968ff7022 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.300339] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b89d996d-ea6c-4d50-963f-b58aa4dc4a81 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.303649] env[62522]: DEBUG oslo_vmware.api [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] 
Waiting for the task: (returnval){ [ 951.303649] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52462e38-7c9b-2a31-2bcf-fd7b8c63aa82" [ 951.303649] env[62522]: _type = "Task" [ 951.303649] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.307975] env[62522]: DEBUG oslo_vmware.api [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Waiting for the task: (returnval){ [ 951.307975] env[62522]: value = "task-2415778" [ 951.307975] env[62522]: _type = "Task" [ 951.307975] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.313697] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-574e90d8-3009-4e08-872d-569180aec09f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.320425] env[62522]: DEBUG oslo_vmware.api [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52462e38-7c9b-2a31-2bcf-fd7b8c63aa82, 'name': SearchDatastore_Task, 'duration_secs': 0.011028} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.321066] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.321303] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 951.321542] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.321693] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.322163] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 951.322163] 
env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2413db0c-68ca-46bc-b32c-1b408df2df88 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.327268] env[62522]: DEBUG oslo_vmware.api [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Task: {'id': task-2415778, 'name': Rename_Task} progress is 10%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.353951] env[62522]: DEBUG nova.compute.manager [req-d9f2934d-fdac-49c6-b60e-c6aa353f87f4 req-af78612e-267f-4c8b-8894-56e6a7a53fab service nova] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Received event network-vif-deleted-cac77d67-105b-49eb-9b57-8c60abb3165a {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 951.354177] env[62522]: DEBUG nova.compute.manager [req-d9f2934d-fdac-49c6-b60e-c6aa353f87f4 req-af78612e-267f-4c8b-8894-56e6a7a53fab service nova] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Received event network-vif-deleted-5ede0b01-ce31-4403-9ce2-41d300d0c750 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 951.355025] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc64b818-7dea-42d2-949e-8191fcae936a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.359297] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 951.359406] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 951.360449] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4a9a386-f37c-4517-b1a2-7f88009c55cd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.367825] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd7a8a16-e4db-4d88-8357-5906fca8aca5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.374548] env[62522]: DEBUG oslo_vmware.api [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Waiting for the task: (returnval){ [ 951.374548] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c24bf2-dbad-8ed5-4c6e-f2479aea1035" [ 951.374548] env[62522]: _type = "Task" [ 951.374548] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.387375] env[62522]: DEBUG nova.compute.provider_tree [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 951.395208] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 41a980df-88a9-4f9b-b34b-905b226c0675] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 951.398423] env[62522]: DEBUG oslo_vmware.api [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c24bf2-dbad-8ed5-4c6e-f2479aea1035, 'name': SearchDatastore_Task, 'duration_secs': 0.011808} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.398423] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-831bd53c-03d3-42ea-b515-ec4d11c8e533 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.405107] env[62522]: DEBUG oslo_vmware.api [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Waiting for the task: (returnval){ [ 951.405107] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52695d55-d7dd-e5aa-8ced-acfd68c8e7a3" [ 951.405107] env[62522]: _type = "Task" [ 951.405107] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.414611] env[62522]: DEBUG oslo_vmware.api [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52695d55-d7dd-e5aa-8ced-acfd68c8e7a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.551433] env[62522]: DEBUG oslo_concurrency.lockutils [req-5928bd71-cda6-4654-ab16-a7deaa5137e5 req-94a55d56-e941-4e63-8ac5-cbdfe603df34 service nova] Releasing lock "refresh_cache-e60d5286-04dd-42bb-ae50-26b0a763d2bc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.821814] env[62522]: DEBUG oslo_vmware.api [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Task: {'id': task-2415778, 'name': Rename_Task, 'duration_secs': 0.165292} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.822104] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 951.822330] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cf064a31-ff4d-4f53-917b-5551435d9a1d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.830178] env[62522]: DEBUG oslo_vmware.api [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Waiting for the task: (returnval){ [ 951.830178] env[62522]: value = "task-2415779" [ 951.830178] env[62522]: _type = "Task" [ 951.830178] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.841458] env[62522]: DEBUG oslo_vmware.api [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Task: {'id': task-2415779, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.891332] env[62522]: DEBUG nova.scheduler.client.report [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 951.901659] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 7a086314-3e49-48e9-82c9-cead8ecb19d1] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 951.916493] env[62522]: DEBUG oslo_vmware.api [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52695d55-d7dd-e5aa-8ced-acfd68c8e7a3, 'name': SearchDatastore_Task, 'duration_secs': 0.01402} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.916738] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.916950] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] e60d5286-04dd-42bb-ae50-26b0a763d2bc/e60d5286-04dd-42bb-ae50-26b0a763d2bc.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 951.917796] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-46bf9402-29d3-403b-9130-1a2ef5370f9f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.924933] env[62522]: DEBUG oslo_vmware.api [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Waiting for the task: (returnval){ [ 951.924933] env[62522]: value = "task-2415780" [ 951.924933] env[62522]: _type = "Task" [ 951.924933] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.934826] env[62522]: DEBUG oslo_vmware.api [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Task: {'id': task-2415780, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.344954] env[62522]: DEBUG oslo_vmware.api [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Task: {'id': task-2415779, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.396674] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.534s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.397617] env[62522]: DEBUG nova.compute.manager [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 952.401435] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.586s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.401727] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.404629] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.018s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.406559] env[62522]: INFO nova.compute.claims [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 952.411733] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 74e663b1-b552-4b71-aa74-308e908d79e7] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 952.437924] env[62522]: DEBUG oslo_vmware.api [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Task: {'id': task-2415780, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50533} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.439076] env[62522]: INFO nova.scheduler.client.report [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Deleted allocations for instance 043a0a1b-268c-4caa-b1f7-cc7d70c3b314 [ 952.441095] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] e60d5286-04dd-42bb-ae50-26b0a763d2bc/e60d5286-04dd-42bb-ae50-26b0a763d2bc.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 952.441095] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 952.444560] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c0d6a6a6-cfb6-406d-94e0-e856ed0a1362 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.452719] env[62522]: DEBUG oslo_vmware.api [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Waiting for the task: (returnval){ [ 952.452719] env[62522]: value = "task-2415781" [ 952.452719] env[62522]: _type = "Task" [ 952.452719] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.465520] env[62522]: DEBUG oslo_vmware.api [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Task: {'id': task-2415781, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.841275] env[62522]: DEBUG oslo_vmware.api [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Task: {'id': task-2415779, 'name': PowerOnVM_Task, 'duration_secs': 0.610262} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.842531] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 952.842531] env[62522]: INFO nova.compute.manager [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Took 8.75 seconds to spawn the instance on the hypervisor. 
[ 952.842531] env[62522]: DEBUG nova.compute.manager [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 952.842763] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ac76157-66b3-4a91-a96a-36461d0cdd6d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.912305] env[62522]: DEBUG nova.compute.utils [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 952.918020] env[62522]: DEBUG nova.compute.manager [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 952.918020] env[62522]: DEBUG nova.network.neutron [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 952.918858] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 5ed51dce-2a56-4389-acf8-280bd93ff5f0] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 952.949453] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7013c7f3-cf4c-41ea-bb6d-82f5f9898686 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock "043a0a1b-268c-4caa-b1f7-cc7d70c3b314" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.646s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.964825] env[62522]: DEBUG oslo_vmware.api [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Task: {'id': task-2415781, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074778} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.965128] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 952.965939] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b89b9c-45ad-408e-8d03-749af3d84801 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.990746] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] e60d5286-04dd-42bb-ae50-26b0a763d2bc/e60d5286-04dd-42bb-ae50-26b0a763d2bc.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 952.991791] env[62522]: DEBUG nova.policy [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0b81d399f06a47bc819693b52bb74004', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ff5da278d2be4ca983424c8291beadec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 952.993553] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed679594-6c5e-434e-808e-e770fd4f58e4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.015735] env[62522]: DEBUG oslo_vmware.api [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Waiting for the task: (returnval){ [ 953.015735] env[62522]: value = "task-2415783" [ 953.015735] env[62522]: _type = "Task" [ 953.015735] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.024276] env[62522]: DEBUG oslo_vmware.api [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Task: {'id': task-2415783, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.363044] env[62522]: INFO nova.compute.manager [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Took 46.16 seconds to build instance. 
[ 953.417770] env[62522]: DEBUG nova.compute.manager [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 953.424073] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 76cb551e-e605-4c80-a6ef-e36681fc0bc2] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 953.528935] env[62522]: DEBUG oslo_vmware.api [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Task: {'id': task-2415783, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.537164] env[62522]: DEBUG nova.network.neutron [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Successfully created port: 195b1951-c091-4db1-82d8-3c20dfcaf6d1 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 953.864544] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8cbd56dc-881d-4fd9-ae61-5c115afa4b2c tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Lock "8539afc0-1753-4c37-9fc9-25ec97b97243" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.129s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.905235] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6153348d-63b7-490a-9f00-bf13fc11c847 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.914111] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29849b08-1112-470a-ae62-66b6b74b228b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.952747] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 504396d8-077d-4563-91b5-a7a6259eea27] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 953.956264] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275e4e49-c016-44e0-bf32-5147878c6f38 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.966260] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcdc2c1a-bffa-4351-b313-b4c4576d1740 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.982914] env[62522]: DEBUG nova.compute.provider_tree [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 
{{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 954.028827] env[62522]: DEBUG oslo_vmware.api [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Task: {'id': task-2415783, 'name': ReconfigVM_Task, 'duration_secs': 0.999639} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.029448] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Reconfigured VM instance instance-00000048 to attach disk [datastore1] e60d5286-04dd-42bb-ae50-26b0a763d2bc/e60d5286-04dd-42bb-ae50-26b0a763d2bc.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 954.029892] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-231df236-0e8f-4726-b9ce-2da2cc064341 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.037381] env[62522]: DEBUG oslo_vmware.api [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Waiting for the task: (returnval){ [ 954.037381] env[62522]: value = "task-2415784" [ 954.037381] env[62522]: _type = "Task" [ 954.037381] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.049021] env[62522]: DEBUG oslo_vmware.api [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Task: {'id': task-2415784, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.411352] env[62522]: DEBUG nova.compute.manager [req-6b45b53a-3a9f-4368-a05a-9ae089061d56 req-40184fea-17f1-4387-87c7-bfe42f73887d service nova] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Received event network-changed-7fd5b82e-a20b-4752-9751-44487429dc0c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 954.411803] env[62522]: DEBUG nova.compute.manager [req-6b45b53a-3a9f-4368-a05a-9ae089061d56 req-40184fea-17f1-4387-87c7-bfe42f73887d service nova] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Refreshing instance network info cache due to event network-changed-7fd5b82e-a20b-4752-9751-44487429dc0c. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 954.414079] env[62522]: DEBUG oslo_concurrency.lockutils [req-6b45b53a-3a9f-4368-a05a-9ae089061d56 req-40184fea-17f1-4387-87c7-bfe42f73887d service nova] Acquiring lock "refresh_cache-8539afc0-1753-4c37-9fc9-25ec97b97243" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.414079] env[62522]: DEBUG oslo_concurrency.lockutils [req-6b45b53a-3a9f-4368-a05a-9ae089061d56 req-40184fea-17f1-4387-87c7-bfe42f73887d service nova] Acquired lock "refresh_cache-8539afc0-1753-4c37-9fc9-25ec97b97243" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.414079] env[62522]: DEBUG nova.network.neutron [req-6b45b53a-3a9f-4368-a05a-9ae089061d56 req-40184fea-17f1-4387-87c7-bfe42f73887d service nova] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Refreshing network info cache for port 7fd5b82e-a20b-4752-9751-44487429dc0c {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 954.456087] env[62522]: DEBUG nova.compute.manager [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 954.458826] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: a185273e-cdaf-4967-832b-f75014b7b3f4] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 954.489659] env[62522]: DEBUG nova.scheduler.client.report [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 954.512854] env[62522]: DEBUG nova.virt.hardware [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:567}} [ 954.513130] env[62522]: DEBUG nova.virt.hardware [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 954.513294] env[62522]: DEBUG nova.virt.hardware [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 954.513479] env[62522]: DEBUG nova.virt.hardware [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 954.513660] env[62522]: DEBUG nova.virt.hardware [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 954.513832] env[62522]: DEBUG nova.virt.hardware [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 954.514362] env[62522]: DEBUG nova.virt.hardware [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 954.514624] env[62522]: DEBUG nova.virt.hardware [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 954.514858] env[62522]: DEBUG nova.virt.hardware [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 954.515078] env[62522]: DEBUG nova.virt.hardware [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 954.515576] env[62522]: DEBUG nova.virt.hardware [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 954.516700] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d9b8daa5-00b4-4fc9-9220-428b136d120d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.534237] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9deb7dd-299d-4a74-8047-7c0649cdde2c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.561033] env[62522]: DEBUG oslo_vmware.api [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Task: {'id': task-2415784, 'name': Rename_Task, 'duration_secs': 0.437997} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.561906] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 954.562210] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-31098830-cb3b-4ffd-8850-6d6935ef25e4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.583246] env[62522]: DEBUG oslo_vmware.api [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Waiting for the task: (returnval){ [ 954.583246] env[62522]: value = "task-2415785" [ 954.583246] env[62522]: _type = "Task" [ 954.583246] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.595219] env[62522]: DEBUG oslo_vmware.api [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Task: {'id': task-2415785, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.962942] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: d68b472d-2139-4e2d-bb28-7e45d80904cb] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 954.998248] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.594s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.002019] env[62522]: DEBUG nova.compute.manager [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 955.003553] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.778s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.005044] env[62522]: INFO nova.compute.claims [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 955.096864] env[62522]: DEBUG oslo_vmware.api [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Task: {'id': task-2415785, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.377367] env[62522]: DEBUG nova.compute.manager [req-2cf498a4-dd4f-4381-a1ab-8acc2d4c3b29 req-794081ff-0c64-47bc-ae27-132322854727 service nova] [instance: 917469c5-20be-4814-814f-a042415be021] Received event network-vif-plugged-195b1951-c091-4db1-82d8-3c20dfcaf6d1 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 955.377587] env[62522]: DEBUG oslo_concurrency.lockutils [req-2cf498a4-dd4f-4381-a1ab-8acc2d4c3b29 req-794081ff-0c64-47bc-ae27-132322854727 service nova] Acquiring lock "917469c5-20be-4814-814f-a042415be021-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.377800] env[62522]: DEBUG oslo_concurrency.lockutils [req-2cf498a4-dd4f-4381-a1ab-8acc2d4c3b29 req-794081ff-0c64-47bc-ae27-132322854727 service nova] Lock "917469c5-20be-4814-814f-a042415be021-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.377974] env[62522]: DEBUG oslo_concurrency.lockutils [req-2cf498a4-dd4f-4381-a1ab-8acc2d4c3b29 req-794081ff-0c64-47bc-ae27-132322854727 service nova] Lock "917469c5-20be-4814-814f-a042415be021-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.378163] env[62522]: DEBUG nova.compute.manager [req-2cf498a4-dd4f-4381-a1ab-8acc2d4c3b29 req-794081ff-0c64-47bc-ae27-132322854727 service nova] [instance: 917469c5-20be-4814-814f-a042415be021] No waiting events found dispatching network-vif-plugged-195b1951-c091-4db1-82d8-3c20dfcaf6d1 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 955.378346] env[62522]: WARNING nova.compute.manager [req-2cf498a4-dd4f-4381-a1ab-8acc2d4c3b29 req-794081ff-0c64-47bc-ae27-132322854727 service nova] [instance: 917469c5-20be-4814-814f-a042415be021] Received unexpected event network-vif-plugged-195b1951-c091-4db1-82d8-3c20dfcaf6d1 for instance with vm_state building and task_state spawning. 
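
The entries just above show the external-event handshake: the compute manager receives network-vif-plugged-195b1951..., takes the per-instance "-events" lock, pops the event, finds no registered waiter, and logs the "Received unexpected event" WARNING. Below is a minimal, self-contained sketch of that waiter-vs-event pattern under stated assumptions; it is not Nova's InstanceEvents code, and the class and function names are made up for illustration.

import threading

class InstanceEventWaiters:
    """Toy registry pairing spawn-side waiters with externally delivered events."""

    def __init__(self):
        self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock
        self._waiters = {}              # event name -> threading.Event

    def prepare(self, name):
        """Called by the thread that will later wait (e.g. before plugging a VIF)."""
        with self._lock:
            return self._waiters.setdefault(name, threading.Event())

    def pop_event(self, name):
        """Called when an external event arrives; returns the waiter or None."""
        with self._lock:
            return self._waiters.pop(name, None)

waiters = InstanceEventWaiters()

def on_external_event(name):
    ev = waiters.pop_event(name)
    if ev is None:
        # corresponds to the WARNING "Received unexpected event ..." in the log
        print("unexpected event %s" % name)
    else:
        ev.set()

# spawner side: register interest, then block until the notification lands
ev = waiters.prepare("network-vif-plugged-195b1951")
on_external_event("network-vif-plugged-195b1951")   # e.g. Neutron notifies Nova
assert ev.wait(timeout=1)

If the event arrives before prepare() was called, pop_event() finds nothing and the event is reported as unexpected, which is exactly the situation the WARNING above records for an instance still in vm_state building.
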
[ 955.396865] env[62522]: DEBUG nova.network.neutron [req-6b45b53a-3a9f-4368-a05a-9ae089061d56 req-40184fea-17f1-4387-87c7-bfe42f73887d service nova] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Updated VIF entry in instance network info cache for port 7fd5b82e-a20b-4752-9751-44487429dc0c. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 955.396865] env[62522]: DEBUG nova.network.neutron [req-6b45b53a-3a9f-4368-a05a-9ae089061d56 req-40184fea-17f1-4387-87c7-bfe42f73887d service nova] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Updating instance_info_cache with network_info: [{"id": "7fd5b82e-a20b-4752-9751-44487429dc0c", "address": "fa:16:3e:d4:fd:35", "network": {"id": "2c41dadc-c6bf-4448-a61d-37d0b25e9bca", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1028483532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fe395ee3cdf34e01a4c59bb81f581c82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fd5b82e-a2", "ovs_interfaceid": "7fd5b82e-a20b-4752-9751-44487429dc0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.472040] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 566c207c-5506-4410-98ab-aee9fdbc5d6e] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 955.505394] env[62522]: DEBUG nova.compute.utils [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 955.506759] env[62522]: DEBUG nova.compute.manager [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 955.506933] env[62522]: DEBUG nova.network.neutron [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 955.510366] env[62522]: DEBUG nova.network.neutron [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Successfully updated port: 195b1951-c091-4db1-82d8-3c20dfcaf6d1 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 955.572520] env[62522]: DEBUG nova.policy [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9694ee575d094ccf845eb57acf3e70c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '00b27498c07344d1bf9cecefa0fca033', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 955.596195] env[62522]: DEBUG oslo_vmware.api [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Task: {'id': task-2415785, 'name': PowerOnVM_Task, 'duration_secs': 0.82388} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.596473] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 955.596676] env[62522]: INFO nova.compute.manager [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Took 7.75 seconds to spawn the instance on the hypervisor. 
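
The Rename_Task and PowerOnVM_Task entries above follow the same shape: a task is started, then repeated "Task: {'id': task-..., 'name': ...} progress is N%" lines appear until the task "completed successfully". The snippet below is a self-contained sketch of that poll-until-done loop; it mirrors the behaviour visible in the log but is not oslo.vmware's wait_for_task() itself, and poll_fn is an illustrative stand-in.

import time

def wait_for_task(poll_fn, interval=0.5):
    """poll_fn() returns (state, progress); loop until the state is terminal."""
    while True:
        state, progress = poll_fn()
        print("progress is %d%%" % progress)   # matches the _poll_task DEBUG lines
        if state == 'success':
            return
        if state == 'error':
            raise RuntimeError("task failed")
        time.sleep(interval)

# toy task that reports 0%, then 66%, then success (compare PowerOnVM_Task above)
_polls = iter([('running', 0), ('running', 66), ('success', 100)])
wait_for_task(lambda: next(_polls), interval=0)
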
[ 955.596883] env[62522]: DEBUG nova.compute.manager [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 955.597655] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ce4ff7-ce0e-4777-8955-902b9a153508 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.900476] env[62522]: DEBUG oslo_concurrency.lockutils [req-6b45b53a-3a9f-4368-a05a-9ae089061d56 req-40184fea-17f1-4387-87c7-bfe42f73887d service nova] Releasing lock "refresh_cache-8539afc0-1753-4c37-9fc9-25ec97b97243" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.906139] env[62522]: DEBUG nova.network.neutron [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Successfully created port: 2692fd0a-af0d-49dd-818a-fe819914252b {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 955.975467] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 713dd924-1c96-496a-bd06-cf0235dd6f75] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 956.011676] env[62522]: DEBUG nova.compute.manager [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 956.017616] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "refresh_cache-917469c5-20be-4814-814f-a042415be021" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.017822] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired lock "refresh_cache-917469c5-20be-4814-814f-a042415be021" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.018051] env[62522]: DEBUG nova.network.neutron [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 956.115367] env[62522]: INFO nova.compute.manager [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Took 47.90 seconds to build instance. 
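
The many 'Lock "compute_resources" acquired by ... :: waited X.XXXs' / '"released" ... :: held X.XXXs' lines come from oslo.concurrency's named-lock helpers, which log on entry and exit. A hedged illustration of the two usual usages follows; lockutils.synchronized() and lockutils.lock() are real oslo.concurrency helpers, but the wrapped functions here are invented for the example.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim():
    # only one thread at a time may update resource-tracker state; entering and
    # leaving produces the "acquired by" / "released by ... held" DEBUG lines
    pass

def refresh_cache(instance_uuid):
    # the context-manager form is used for per-instance locks such as
    # "refresh_cache-<uuid>" seen throughout the log
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass

instance_claim()
refresh_cache('917469c5-20be-4814-814f-a042415be021')
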
[ 956.480161] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99fb24a7-d702-4a90-be3f-cf536531cd13 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.483943] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: ee1c638b-1f38-4e21-9369-4d4ff2e13d46] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 956.492593] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-246b3c7d-42a3-4aba-a359-4cb325cd691e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.534602] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3310c775-f7ec-421f-a413-8013719f9ba1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.541959] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a77323-3fb6-4f01-a041-3055ce56e30b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.555420] env[62522]: DEBUG nova.compute.provider_tree [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 956.565879] env[62522]: DEBUG nova.network.neutron [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 956.574915] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquiring lock "e813e7da-fd2c-4f10-b2f3-1e2b5c153a19" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.575173] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock "e813e7da-fd2c-4f10-b2f3-1e2b5c153a19" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.575385] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquiring lock "e813e7da-fd2c-4f10-b2f3-1e2b5c153a19-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.575656] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock "e813e7da-fd2c-4f10-b2f3-1e2b5c153a19-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.575864] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock "e813e7da-fd2c-4f10-b2f3-1e2b5c153a19-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.577661] env[62522]: INFO nova.compute.manager [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Terminating instance [ 956.617378] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9127e7e-947b-4174-83f0-f8136ebb6c88 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Lock "e60d5286-04dd-42bb-ae50-26b0a763d2bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.466s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.712258] env[62522]: DEBUG nova.network.neutron [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Updating instance_info_cache with network_info: [{"id": "195b1951-c091-4db1-82d8-3c20dfcaf6d1", "address": "fa:16:3e:9c:63:e3", "network": {"id": 
"70e81afa-0eda-49c5-b072-e79b3c287468", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1715169983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff5da278d2be4ca983424c8291beadec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap195b1951-c0", "ovs_interfaceid": "195b1951-c091-4db1-82d8-3c20dfcaf6d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.988241] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: d6935c9b-e4cc-47ed-96d5-e485d60382d6] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 957.031833] env[62522]: DEBUG nova.compute.manager [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 957.058755] env[62522]: DEBUG nova.scheduler.client.report [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 957.065838] env[62522]: DEBUG nova.virt.hardware [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False 
{{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 957.066234] env[62522]: DEBUG nova.virt.hardware [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 957.066515] env[62522]: DEBUG nova.virt.hardware [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 957.067060] env[62522]: DEBUG nova.virt.hardware [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 957.067060] env[62522]: DEBUG nova.virt.hardware [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 957.067352] env[62522]: DEBUG nova.virt.hardware [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 957.067706] env[62522]: DEBUG nova.virt.hardware [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 957.068043] env[62522]: DEBUG nova.virt.hardware [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 957.068328] env[62522]: DEBUG nova.virt.hardware [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 957.068665] env[62522]: DEBUG nova.virt.hardware [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 957.069376] env[62522]: DEBUG nova.virt.hardware [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 957.070885] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8aa46d-465d-4a3b-92b6-0fd3dc7653c6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
957.083297] env[62522]: DEBUG nova.compute.manager [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 957.083297] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 957.083421] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a84b3b67-c872-4a6e-b133-c3c63d669ab5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.086769] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5846f4b8-7ef8-427c-adc3-e28aea5d9096 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.103474] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 957.103704] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9bd0a20d-0f6d-49ef-a87c-245095f7d507 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.110057] env[62522]: DEBUG oslo_vmware.api [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for the task: (returnval){ [ 957.110057] env[62522]: value = "task-2415786" [ 957.110057] env[62522]: _type = "Task" [ 957.110057] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.118322] env[62522]: DEBUG oslo_vmware.api [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415786, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.214830] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Releasing lock "refresh_cache-917469c5-20be-4814-814f-a042415be021" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.215228] env[62522]: DEBUG nova.compute.manager [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Instance network_info: |[{"id": "195b1951-c091-4db1-82d8-3c20dfcaf6d1", "address": "fa:16:3e:9c:63:e3", "network": {"id": "70e81afa-0eda-49c5-b072-e79b3c287468", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1715169983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff5da278d2be4ca983424c8291beadec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap195b1951-c0", "ovs_interfaceid": "195b1951-c091-4db1-82d8-3c20dfcaf6d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 957.216073] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:63:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7654928b-7afe-42e3-a18d-68ecc775cefe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '195b1951-c091-4db1-82d8-3c20dfcaf6d1', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 957.223770] env[62522]: DEBUG oslo.service.loopingcall [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 957.223992] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 917469c5-20be-4814-814f-a042415be021] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 957.224233] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ef4d60bd-5f73-4942-a457-008cf344f3f5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.242025] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Acquiring lock "e60d5286-04dd-42bb-ae50-26b0a763d2bc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.242402] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Lock "e60d5286-04dd-42bb-ae50-26b0a763d2bc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.242725] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Acquiring lock "e60d5286-04dd-42bb-ae50-26b0a763d2bc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.243017] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Lock "e60d5286-04dd-42bb-ae50-26b0a763d2bc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.243339] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Lock "e60d5286-04dd-42bb-ae50-26b0a763d2bc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.245898] env[62522]: INFO nova.compute.manager [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Terminating instance [ 957.254232] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 957.254232] env[62522]: value = "task-2415787" [ 957.254232] env[62522]: _type = "Task" [ 957.254232] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.269285] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415787, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.470314] env[62522]: DEBUG nova.network.neutron [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Successfully updated port: 2692fd0a-af0d-49dd-818a-fe819914252b {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 957.493707] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 0d36b844-554e-46e7-9cf9-ef04b67e8898] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 957.577768] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.573s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.577768] env[62522]: DEBUG nova.compute.manager [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 957.580700] env[62522]: DEBUG oslo_concurrency.lockutils [None req-01dccc48-c48f-4870-8972-b57f018d667a tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.357s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.581633] env[62522]: DEBUG oslo_concurrency.lockutils [None req-01dccc48-c48f-4870-8972-b57f018d667a tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.583751] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.894s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.585587] env[62522]: INFO nova.compute.claims [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 957.592697] env[62522]: DEBUG nova.compute.manager [req-ba07ddc5-da58-4f66-8493-b554a6f6ab68 req-6d300f9b-043d-485e-af16-593f5a01f550 service nova] [instance: 917469c5-20be-4814-814f-a042415be021] Received event network-changed-195b1951-c091-4db1-82d8-3c20dfcaf6d1 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 957.592942] env[62522]: DEBUG nova.compute.manager [req-ba07ddc5-da58-4f66-8493-b554a6f6ab68 
req-6d300f9b-043d-485e-af16-593f5a01f550 service nova] [instance: 917469c5-20be-4814-814f-a042415be021] Refreshing instance network info cache due to event network-changed-195b1951-c091-4db1-82d8-3c20dfcaf6d1. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 957.593198] env[62522]: DEBUG oslo_concurrency.lockutils [req-ba07ddc5-da58-4f66-8493-b554a6f6ab68 req-6d300f9b-043d-485e-af16-593f5a01f550 service nova] Acquiring lock "refresh_cache-917469c5-20be-4814-814f-a042415be021" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.593352] env[62522]: DEBUG oslo_concurrency.lockutils [req-ba07ddc5-da58-4f66-8493-b554a6f6ab68 req-6d300f9b-043d-485e-af16-593f5a01f550 service nova] Acquired lock "refresh_cache-917469c5-20be-4814-814f-a042415be021" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.593517] env[62522]: DEBUG nova.network.neutron [req-ba07ddc5-da58-4f66-8493-b554a6f6ab68 req-6d300f9b-043d-485e-af16-593f5a01f550 service nova] [instance: 917469c5-20be-4814-814f-a042415be021] Refreshing network info cache for port 195b1951-c091-4db1-82d8-3c20dfcaf6d1 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 957.624940] env[62522]: DEBUG oslo_vmware.api [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415786, 'name': PowerOffVM_Task, 'duration_secs': 0.197231} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.625405] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 957.625560] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 957.625795] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-032c2a17-a41b-4f5a-8ad6-c330a9c9bc7d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.628521] env[62522]: INFO nova.scheduler.client.report [None req-01dccc48-c48f-4870-8972-b57f018d667a tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Deleted allocations for instance 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2 [ 957.698384] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 957.698384] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: 
e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 957.698384] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Deleting the datastore file [datastore2] e813e7da-fd2c-4f10-b2f3-1e2b5c153a19 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 957.698384] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-51d3c1e8-7539-45d7-8252-cc08c9aeda75 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.707570] env[62522]: DEBUG oslo_vmware.api [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for the task: (returnval){ [ 957.707570] env[62522]: value = "task-2415789" [ 957.707570] env[62522]: _type = "Task" [ 957.707570] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.718115] env[62522]: DEBUG oslo_vmware.api [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415789, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.753580] env[62522]: DEBUG nova.compute.manager [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 957.755674] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 957.755674] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d56b7ef-7e50-4688-b819-6ba2f69d5226 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.767510] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415787, 'name': CreateVM_Task, 'duration_secs': 0.376759} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.769984] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 917469c5-20be-4814-814f-a042415be021] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 957.770361] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 957.771163] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.771400] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.771756] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 957.772020] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d1aaa616-0f0d-4040-93a2-e5a0114db1d4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.773534] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3be115a5-17e3-4d92-9038-107f76270d12 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.778640] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 957.778640] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524ae2cc-896a-143c-2b61-a30f65d21b66" [ 957.778640] env[62522]: _type = "Task" [ 957.778640] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.783373] env[62522]: DEBUG oslo_vmware.api [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Waiting for the task: (returnval){ [ 957.783373] env[62522]: value = "task-2415790" [ 957.783373] env[62522]: _type = "Task" [ 957.783373] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.790362] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524ae2cc-896a-143c-2b61-a30f65d21b66, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.797400] env[62522]: DEBUG oslo_vmware.api [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Task: {'id': task-2415790, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.974276] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "refresh_cache-a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.974435] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired lock "refresh_cache-a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.974594] env[62522]: DEBUG nova.network.neutron [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 957.999568] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: d30397b4-c617-4717-b624-ad1b06331bea] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 958.091443] env[62522]: DEBUG nova.compute.utils [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 958.101638] env[62522]: DEBUG nova.compute.manager [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 958.101638] env[62522]: DEBUG nova.network.neutron [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 958.139344] env[62522]: DEBUG oslo_concurrency.lockutils [None req-01dccc48-c48f-4870-8972-b57f018d667a tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "845f99b8-4a9d-4fbe-89e1-825a5ddd01f2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.836s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.190509] env[62522]: DEBUG nova.policy [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4f5cbca1ae3544ab83eeb0eb80606c56', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed393a0454b643eea75c203d1dfd592c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 958.220687] env[62522]: DEBUG oslo_vmware.api [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Task: {'id': task-2415789, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143338} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.220687] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 958.220874] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 958.221374] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 958.221812] env[62522]: INFO nova.compute.manager [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Took 1.14 seconds to destroy the instance on the hypervisor. 
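The PowerOffVM_Task, SearchDatastore_Task and DeleteDatastoreFile_Task entries above all follow the same oslo.vmware pattern: the driver starts an asynchronous vSphere task, and wait_for_task then polls it (the recurring "_poll_task ... progress is N%" lines) until the task reports success or raises. A minimal Python sketch of that pattern outside Nova, assuming a reachable vCenter; the hostname, credentials, and the choice of "first VirtualMachine returned" are placeholders, not values taken from this log:

    from oslo_vmware import api, vim_util

    # Placeholder endpoint and credentials; in Nova these come from the
    # [vmware] section of nova.conf.
    session = api.VMwareAPISession('vcenter.example.org', 'admin', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Grab any VirtualMachine managed-object reference just to have a target.
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'VirtualMachine', 1)
    vm_ref = result.objects[0].obj

    # Start the asynchronous vSphere task, then block until it finishes.
    # wait_for_task is what produces the "_poll_task ... progress is N%"
    # entries above and raises if the task ends in error.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    task_info = session.wait_for_task(task)
    print(task_info.state)   # 'success' once the power-off completes

Nova's vm_util.power_off_instance and ds_util.file_delete helpers wrap this same invoke-and-wait pair, which is why each operation in the log shows up as an "Invoking ..." line followed by one or more progress polls and a "completed successfully" entry.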
[ 958.222105] env[62522]: DEBUG oslo.service.loopingcall [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 958.222426] env[62522]: DEBUG nova.compute.manager [-] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 958.222533] env[62522]: DEBUG nova.network.neutron [-] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 958.291462] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524ae2cc-896a-143c-2b61-a30f65d21b66, 'name': SearchDatastore_Task, 'duration_secs': 0.011705} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.294272] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.294575] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 958.294826] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.294976] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.295172] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 958.295767] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-70244195-8f63-401c-afef-5f09ad4fc604 
{{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.300862] env[62522]: DEBUG oslo_vmware.api [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Task: {'id': task-2415790, 'name': PowerOffVM_Task, 'duration_secs': 0.193014} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.301465] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 958.301726] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 958.301987] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3410f79e-571e-48a3-931f-cfdef3f04ce2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.308443] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 958.308693] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 958.309419] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a3ba232-e50e-4898-a121-5790118f19a3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.315641] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 958.315641] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bfab02-cf38-5ea2-bc7d-5b56520eade5" [ 958.315641] env[62522]: _type = "Task" [ 958.315641] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.324668] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bfab02-cf38-5ea2-bc7d-5b56520eade5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.392860] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 958.392860] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 958.392860] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Deleting the datastore file [datastore1] e60d5286-04dd-42bb-ae50-26b0a763d2bc {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 958.392860] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c8d79ee-cc32-4d87-97e2-69d356e6e433 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.400383] env[62522]: DEBUG oslo_vmware.api [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Waiting for the task: (returnval){ [ 958.400383] env[62522]: value = "task-2415792" [ 958.400383] env[62522]: _type = "Task" [ 958.400383] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.413944] env[62522]: DEBUG oslo_vmware.api [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Task: {'id': task-2415792, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.475473] env[62522]: DEBUG nova.network.neutron [req-ba07ddc5-da58-4f66-8493-b554a6f6ab68 req-6d300f9b-043d-485e-af16-593f5a01f550 service nova] [instance: 917469c5-20be-4814-814f-a042415be021] Updated VIF entry in instance network info cache for port 195b1951-c091-4db1-82d8-3c20dfcaf6d1. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 958.475939] env[62522]: DEBUG nova.network.neutron [req-ba07ddc5-da58-4f66-8493-b554a6f6ab68 req-6d300f9b-043d-485e-af16-593f5a01f550 service nova] [instance: 917469c5-20be-4814-814f-a042415be021] Updating instance_info_cache with network_info: [{"id": "195b1951-c091-4db1-82d8-3c20dfcaf6d1", "address": "fa:16:3e:9c:63:e3", "network": {"id": "70e81afa-0eda-49c5-b072-e79b3c287468", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1715169983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff5da278d2be4ca983424c8291beadec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap195b1951-c0", "ovs_interfaceid": "195b1951-c091-4db1-82d8-3c20dfcaf6d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.503328] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 63a7f41d-13cc-420a-96d3-a3f102869137] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 958.529168] env[62522]: DEBUG nova.network.neutron [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 958.604790] env[62522]: DEBUG nova.compute.manager [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 958.758476] env[62522]: DEBUG nova.network.neutron [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Successfully created port: f02543d0-0c93-4b22-b8b1-b3a08a67ba0c {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 958.773525] env[62522]: DEBUG nova.network.neutron [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Updating instance_info_cache with network_info: [{"id": "2692fd0a-af0d-49dd-818a-fe819914252b", "address": "fa:16:3e:62:a9:ab", "network": {"id": "2037da36-cd13-4cb1-95f4-08d1e174336c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1895994075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00b27498c07344d1bf9cecefa0fca033", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2692fd0a-af", "ovs_interfaceid": "2692fd0a-af0d-49dd-818a-fe819914252b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.829383] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bfab02-cf38-5ea2-bc7d-5b56520eade5, 'name': SearchDatastore_Task, 'duration_secs': 0.010605} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.830747] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45f224fd-fbad-4a7e-a0ee-26dd556902d5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.842301] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 958.842301] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5285917e-9a12-fed6-52dd-0047d281e124" [ 958.842301] env[62522]: _type = "Task" [ 958.842301] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.853477] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5285917e-9a12-fed6-52dd-0047d281e124, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.915180] env[62522]: DEBUG oslo_vmware.api [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Task: {'id': task-2415792, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157866} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.916033] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 958.916247] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 958.916429] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 958.916641] env[62522]: INFO nova.compute.manager [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Took 1.16 seconds to destroy the instance on the hypervisor. [ 958.916895] env[62522]: DEBUG oslo.service.loopingcall [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 958.917232] env[62522]: DEBUG nova.compute.manager [-] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 958.917307] env[62522]: DEBUG nova.network.neutron [-] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 958.929374] env[62522]: DEBUG nova.compute.manager [req-caaa4694-e974-4498-91e4-ca1fdfff161d req-a571aa65-14a8-42a8-931c-34410dde817f service nova] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Received event network-vif-deleted-648bb769-184a-43cb-a66e-9b36814e4e4a {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 958.929374] env[62522]: INFO nova.compute.manager [req-caaa4694-e974-4498-91e4-ca1fdfff161d req-a571aa65-14a8-42a8-931c-34410dde817f service nova] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Neutron deleted interface 648bb769-184a-43cb-a66e-9b36814e4e4a; detaching it from the instance and deleting it from the info cache [ 958.929559] env[62522]: DEBUG nova.network.neutron [req-caaa4694-e974-4498-91e4-ca1fdfff161d req-a571aa65-14a8-42a8-931c-34410dde817f service nova] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.981032] env[62522]: DEBUG oslo_concurrency.lockutils [req-ba07ddc5-da58-4f66-8493-b554a6f6ab68 req-6d300f9b-043d-485e-af16-593f5a01f550 service nova] Releasing lock "refresh_cache-917469c5-20be-4814-814f-a042415be021" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.981131] env[62522]: DEBUG nova.compute.manager [req-ba07ddc5-da58-4f66-8493-b554a6f6ab68 req-6d300f9b-043d-485e-af16-593f5a01f550 service nova] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Received event network-vif-plugged-2692fd0a-af0d-49dd-818a-fe819914252b {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 958.981339] env[62522]: DEBUG oslo_concurrency.lockutils [req-ba07ddc5-da58-4f66-8493-b554a6f6ab68 req-6d300f9b-043d-485e-af16-593f5a01f550 service nova] Acquiring lock "a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.981543] env[62522]: DEBUG oslo_concurrency.lockutils [req-ba07ddc5-da58-4f66-8493-b554a6f6ab68 req-6d300f9b-043d-485e-af16-593f5a01f550 service nova] Lock "a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.981710] env[62522]: DEBUG oslo_concurrency.lockutils [req-ba07ddc5-da58-4f66-8493-b554a6f6ab68 req-6d300f9b-043d-485e-af16-593f5a01f550 service nova] Lock "a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.982187] env[62522]: DEBUG nova.compute.manager 
[req-ba07ddc5-da58-4f66-8493-b554a6f6ab68 req-6d300f9b-043d-485e-af16-593f5a01f550 service nova] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] No waiting events found dispatching network-vif-plugged-2692fd0a-af0d-49dd-818a-fe819914252b {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 958.982187] env[62522]: WARNING nova.compute.manager [req-ba07ddc5-da58-4f66-8493-b554a6f6ab68 req-6d300f9b-043d-485e-af16-593f5a01f550 service nova] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Received unexpected event network-vif-plugged-2692fd0a-af0d-49dd-818a-fe819914252b for instance with vm_state building and task_state spawning. [ 959.006618] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: d266aff3-42b4-4dcb-b8ca-7c13cdf8d314] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 959.111856] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73508348-160d-4987-abc4-9e5773b779d7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.128514] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da000e26-e2dc-4fc1-bfaa-e6ec3eb84f84 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.168375] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03cba907-3b71-4e2b-8d3c-abfbdc04dc9c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.178772] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c55391c2-47eb-4344-8236-d9857e4e6f04 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.195645] env[62522]: DEBUG nova.compute.provider_tree [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 959.276690] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Releasing lock "refresh_cache-a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.277134] env[62522]: DEBUG nova.compute.manager [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Instance network_info: |[{"id": "2692fd0a-af0d-49dd-818a-fe819914252b", "address": "fa:16:3e:62:a9:ab", "network": {"id": "2037da36-cd13-4cb1-95f4-08d1e174336c", "bridge": "br-int", "label": 
"tempest-ServersTestJSON-1895994075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00b27498c07344d1bf9cecefa0fca033", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2692fd0a-af", "ovs_interfaceid": "2692fd0a-af0d-49dd-818a-fe819914252b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 959.277649] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:a9:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f65996a3-f865-4492-9377-cd14ec8b3aae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2692fd0a-af0d-49dd-818a-fe819914252b', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 959.287370] env[62522]: DEBUG oslo.service.loopingcall [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 959.287370] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 959.287370] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c58127db-b4e6-4b72-a838-b8233401d57a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.309320] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 959.309320] env[62522]: value = "task-2415793" [ 959.309320] env[62522]: _type = "Task" [ 959.309320] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.319412] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415793, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.352991] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5285917e-9a12-fed6-52dd-0047d281e124, 'name': SearchDatastore_Task, 'duration_secs': 0.010906} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.353293] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.353577] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 917469c5-20be-4814-814f-a042415be021/917469c5-20be-4814-814f-a042415be021.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 959.353854] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-24837627-9a17-4af7-b73a-55eb0605da96 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.364190] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 959.364190] env[62522]: value = "task-2415794" [ 959.364190] env[62522]: _type = "Task" [ 959.364190] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.379195] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415794, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.404297] env[62522]: DEBUG nova.network.neutron [-] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.433284] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2bc54b9f-c4eb-4e48-b3ab-9ce510960260 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.447209] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-858525a3-0a09-40d0-9027-2b62ff8d07fa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.488335] env[62522]: DEBUG nova.compute.manager [req-caaa4694-e974-4498-91e4-ca1fdfff161d req-a571aa65-14a8-42a8-931c-34410dde817f service nova] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Detach interface failed, port_id=648bb769-184a-43cb-a66e-9b36814e4e4a, reason: Instance e813e7da-fd2c-4f10-b2f3-1e2b5c153a19 could not be found. 
{{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 959.509758] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 5b69254a-b34b-48ff-a96c-d8573c9abf3b] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 959.620760] env[62522]: DEBUG nova.compute.manager [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 959.628016] env[62522]: DEBUG nova.compute.manager [req-feff66f3-86f4-4ce2-9e2e-78c7be77ae41 req-dd337399-7f3b-4d08-aa57-e44f5d424194 service nova] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Received event network-changed-2692fd0a-af0d-49dd-818a-fe819914252b {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 959.629643] env[62522]: DEBUG nova.compute.manager [req-feff66f3-86f4-4ce2-9e2e-78c7be77ae41 req-dd337399-7f3b-4d08-aa57-e44f5d424194 service nova] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Refreshing instance network info cache due to event network-changed-2692fd0a-af0d-49dd-818a-fe819914252b. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 959.629903] env[62522]: DEBUG oslo_concurrency.lockutils [req-feff66f3-86f4-4ce2-9e2e-78c7be77ae41 req-dd337399-7f3b-4d08-aa57-e44f5d424194 service nova] Acquiring lock "refresh_cache-a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.630073] env[62522]: DEBUG oslo_concurrency.lockutils [req-feff66f3-86f4-4ce2-9e2e-78c7be77ae41 req-dd337399-7f3b-4d08-aa57-e44f5d424194 service nova] Acquired lock "refresh_cache-a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.630247] env[62522]: DEBUG nova.network.neutron [req-feff66f3-86f4-4ce2-9e2e-78c7be77ae41 req-dd337399-7f3b-4d08-aa57-e44f5d424194 service nova] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Refreshing network info cache for port 2692fd0a-af0d-49dd-818a-fe819914252b {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 959.653800] env[62522]: DEBUG nova.virt.hardware [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 959.654069] env[62522]: DEBUG nova.virt.hardware [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 959.654233] env[62522]: DEBUG nova.virt.hardware [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 959.654420] env[62522]: DEBUG nova.virt.hardware [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 959.654572] env[62522]: DEBUG nova.virt.hardware [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 959.654721] env[62522]: DEBUG nova.virt.hardware [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 959.655007] env[62522]: DEBUG nova.virt.hardware [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 959.655218] env[62522]: DEBUG nova.virt.hardware [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 959.655400] env[62522]: DEBUG nova.virt.hardware [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 959.655569] env[62522]: DEBUG nova.virt.hardware [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 959.655745] env[62522]: DEBUG nova.virt.hardware [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 959.656648] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ed1a3005-e6e6-433d-923d-c0cc5d4507de {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.667208] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f89abad6-3057-4b20-ae55-226e20a9e425 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.720058] env[62522]: ERROR nova.scheduler.client.report [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [req-9e0d58a4-2825-4b49-8e39-d56c030d784f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c7fa38b2-245d-4337-a012-22c1a01c0a72. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9e0d58a4-2825-4b49-8e39-d56c030d784f"}]} [ 959.820198] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415793, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.834462] env[62522]: DEBUG nova.scheduler.client.report [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Refreshing inventories for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 959.858450] env[62522]: DEBUG nova.scheduler.client.report [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Updating ProviderTree inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 959.858711] env[62522]: DEBUG nova.compute.provider_tree [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 959.875953] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 
tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415794, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513025} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.876973] env[62522]: DEBUG nova.scheduler.client.report [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Refreshing aggregate associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, aggregates: None {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 959.878955] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 917469c5-20be-4814-814f-a042415be021/917469c5-20be-4814-814f-a042415be021.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 959.879214] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 959.880476] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1630b5c6-e78a-4435-881a-cb58635fed75 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.886573] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 959.886573] env[62522]: value = "task-2415795" [ 959.886573] env[62522]: _type = "Task" [ 959.886573] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.901666] env[62522]: DEBUG nova.network.neutron [-] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.905113] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415795, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.908797] env[62522]: DEBUG nova.scheduler.client.report [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Refreshing trait associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 959.911441] env[62522]: INFO nova.compute.manager [-] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Took 1.69 seconds to deallocate network for instance. [ 960.016020] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 194c1dd8-3b0a-4c29-9779-65f1534121d1] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 960.325599] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415793, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.332883] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef7b3984-4df0-4150-b26e-6036055337d0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.341368] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-430a22ef-4c5b-4b97-b3e0-08f45b8b7b1d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.376496] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-697a0746-7a69-4a8b-b467-bc10fa5e3125 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.385306] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80363abf-1d37-44bf-ad8c-712958c9fe2f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.398020] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415795, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075335} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.408791] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 960.409253] env[62522]: INFO nova.compute.manager [-] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Took 1.49 seconds to deallocate network for instance. 
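The 409 from placement at [ 959.720058] is its generation-based optimistic concurrency check: every inventory write carries the resource_provider_generation the writer last saw, and placement rejects the PUT with placement.concurrent_update when another writer has bumped it, after which the report client re-reads the inventory ("Refreshing inventories ...") and retries, here concluding that the inventory had not actually changed. A rough sketch of the same read-modify-retry loop against the placement HTTP API, assuming a hypothetical endpoint and token; only the provider UUID and the inventory shape are taken from the log:

    import requests

    PLACEMENT = 'http://placement.example.org/placement'   # hypothetical
    HEADERS = {'x-auth-token': 'ADMIN_TOKEN',               # hypothetical
               'openstack-api-version': 'placement 1.26'}
    RP_UUID = 'c7fa38b2-245d-4337-a012-22c1a01c0a72'

    def put_inventories(inventories, retries=3):
        """PUT the inventory set, re-reading the provider generation and
        retrying while placement answers 409 placement.concurrent_update."""
        url = f'{PLACEMENT}/resource_providers/{RP_UUID}/inventories'
        for _ in range(retries):
            current = requests.get(url, headers=HEADERS).json()
            body = {
                'resource_provider_generation':
                    current['resource_provider_generation'],
                'inventories': inventories,
            }
            resp = requests.put(url, json=body, headers=HEADERS)
            if resp.status_code != 409:
                return resp
        raise RuntimeError('generation conflict persisted after retries')

    # Example payload shaped like the DISK_GB entry in the log. Note that
    # PUT replaces the whole inventory set, so a real caller would send
    # every resource class (VCPU, MEMORY_MB, DISK_GB) together.
    put_inventories({'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1,
                                 'max_unit': 148, 'step_size': 1,
                                 'allocation_ratio': 1.0}})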
[ 960.410030] env[62522]: DEBUG nova.compute.provider_tree [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 960.412250] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e45e527f-19b1-4644-bc80-4577d5eec561 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.417583] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.451222] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] 917469c5-20be-4814-814f-a042415be021/917469c5-20be-4814-814f-a042415be021.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 960.451799] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3afd1908-ff8f-4ff6-b02c-76837cb4b791 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.482992] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 960.482992] env[62522]: value = "task-2415796" [ 960.482992] env[62522]: _type = "Task" [ 960.482992] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.495912] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415796, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.506887] env[62522]: DEBUG nova.network.neutron [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Successfully updated port: f02543d0-0c93-4b22-b8b1-b3a08a67ba0c {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 960.513913] env[62522]: DEBUG nova.network.neutron [req-feff66f3-86f4-4ce2-9e2e-78c7be77ae41 req-dd337399-7f3b-4d08-aa57-e44f5d424194 service nova] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Updated VIF entry in instance network info cache for port 2692fd0a-af0d-49dd-818a-fe819914252b. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 960.514233] env[62522]: DEBUG nova.network.neutron [req-feff66f3-86f4-4ce2-9e2e-78c7be77ae41 req-dd337399-7f3b-4d08-aa57-e44f5d424194 service nova] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Updating instance_info_cache with network_info: [{"id": "2692fd0a-af0d-49dd-818a-fe819914252b", "address": "fa:16:3e:62:a9:ab", "network": {"id": "2037da36-cd13-4cb1-95f4-08d1e174336c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1895994075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00b27498c07344d1bf9cecefa0fca033", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2692fd0a-af", "ovs_interfaceid": "2692fd0a-af0d-49dd-818a-fe819914252b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.519096] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 87a90c88-6e0a-4051-8978-b2f9c5a876ca] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 960.829258] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415793, 'name': CreateVM_Task, 'duration_secs': 1.416877} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.830999] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 960.830999] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.830999] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.830999] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 960.831294] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c44b85a-76bc-41ff-a181-9cbee1188786 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.837586] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 960.837586] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5282ce98-06f7-754a-3e81-81cebe89b203" [ 960.837586] env[62522]: _type = "Task" [ 960.837586] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.848616] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5282ce98-06f7-754a-3e81-81cebe89b203, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.918710] env[62522]: DEBUG nova.scheduler.client.report [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 960.929260] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.994199] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415796, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.011497] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquiring lock "refresh_cache-7e5fc552-748f-4569-bd61-c81a52bb46b0" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 961.011737] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquired lock "refresh_cache-7e5fc552-748f-4569-bd61-c81a52bb46b0" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.011883] env[62522]: DEBUG nova.network.neutron [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 961.016650] env[62522]: DEBUG oslo_concurrency.lockutils [req-feff66f3-86f4-4ce2-9e2e-78c7be77ae41 req-dd337399-7f3b-4d08-aa57-e44f5d424194 service nova] Releasing lock "refresh_cache-a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 961.022241] env[62522]: DEBUG nova.compute.manager [req-4d40c3b3-0a54-4033-89ec-cefb98bcaf00 req-07f85c2a-ddf8-414a-9335-0b8cc14fd9c5 service nova] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Received event network-vif-deleted-68be6786-9e14-4d60-800f-6744b965ccd0 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 961.024910] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 
ae3e55b8-00c1-4dae-9276-f46a1e17b80e] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 961.352080] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5282ce98-06f7-754a-3e81-81cebe89b203, 'name': SearchDatastore_Task, 'duration_secs': 0.059891} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.352802] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 961.352891] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 961.353318] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 961.353631] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.353954] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 961.354383] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c365f11e-dacf-4b9f-a764-9253d80def97 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.366053] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 961.366267] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 961.367016] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e6afbf0-2738-475c-b7f8-97bd6cd64e9e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.374225] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 961.374225] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a1978d-5e20-b7d6-4e2f-f03df27e56f6" [ 961.374225] env[62522]: _type = "Task" [ 961.374225] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.382208] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a1978d-5e20-b7d6-4e2f-f03df27e56f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.424292] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.840s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.424879] env[62522]: DEBUG nova.compute.manager [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 961.430287] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 24.672s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.496809] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415796, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.527423] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 8461f823-e48a-42f0-8863-44177565b82d] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 961.576139] env[62522]: DEBUG nova.network.neutron [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 961.730024] env[62522]: DEBUG nova.compute.manager [req-5324664e-143b-4c06-b564-65bcf9422e68 req-dc6f1d7e-c320-4813-a8f1-cb41c92d33fb service nova] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Received event network-vif-plugged-f02543d0-0c93-4b22-b8b1-b3a08a67ba0c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 961.730024] env[62522]: DEBUG oslo_concurrency.lockutils [req-5324664e-143b-4c06-b564-65bcf9422e68 req-dc6f1d7e-c320-4813-a8f1-cb41c92d33fb service nova] Acquiring lock "7e5fc552-748f-4569-bd61-c81a52bb46b0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.730024] env[62522]: DEBUG oslo_concurrency.lockutils [req-5324664e-143b-4c06-b564-65bcf9422e68 req-dc6f1d7e-c320-4813-a8f1-cb41c92d33fb service nova] Lock "7e5fc552-748f-4569-bd61-c81a52bb46b0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.730024] env[62522]: DEBUG oslo_concurrency.lockutils [req-5324664e-143b-4c06-b564-65bcf9422e68 req-dc6f1d7e-c320-4813-a8f1-cb41c92d33fb service nova] Lock "7e5fc552-748f-4569-bd61-c81a52bb46b0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.730024] env[62522]: DEBUG nova.compute.manager [req-5324664e-143b-4c06-b564-65bcf9422e68 req-dc6f1d7e-c320-4813-a8f1-cb41c92d33fb service nova] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] No waiting events found dispatching network-vif-plugged-f02543d0-0c93-4b22-b8b1-b3a08a67ba0c {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 961.730024] env[62522]: WARNING nova.compute.manager [req-5324664e-143b-4c06-b564-65bcf9422e68 req-dc6f1d7e-c320-4813-a8f1-cb41c92d33fb service nova] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Received unexpected event network-vif-plugged-f02543d0-0c93-4b22-b8b1-b3a08a67ba0c for instance with vm_state building and task_state spawning. [ 961.730024] env[62522]: DEBUG nova.compute.manager [req-5324664e-143b-4c06-b564-65bcf9422e68 req-dc6f1d7e-c320-4813-a8f1-cb41c92d33fb service nova] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Received event network-changed-f02543d0-0c93-4b22-b8b1-b3a08a67ba0c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 961.730024] env[62522]: DEBUG nova.compute.manager [req-5324664e-143b-4c06-b564-65bcf9422e68 req-dc6f1d7e-c320-4813-a8f1-cb41c92d33fb service nova] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Refreshing instance network info cache due to event network-changed-f02543d0-0c93-4b22-b8b1-b3a08a67ba0c. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 961.730024] env[62522]: DEBUG oslo_concurrency.lockutils [req-5324664e-143b-4c06-b564-65bcf9422e68 req-dc6f1d7e-c320-4813-a8f1-cb41c92d33fb service nova] Acquiring lock "refresh_cache-7e5fc552-748f-4569-bd61-c81a52bb46b0" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 961.811427] env[62522]: DEBUG nova.network.neutron [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Updating instance_info_cache with network_info: [{"id": "f02543d0-0c93-4b22-b8b1-b3a08a67ba0c", "address": "fa:16:3e:3d:39:46", "network": {"id": "b837f0fb-c2e1-46dd-93b2-62d6c4352316", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1813744063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed393a0454b643eea75c203d1dfd592c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf02543d0-0c", "ovs_interfaceid": "f02543d0-0c93-4b22-b8b1-b3a08a67ba0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.885253] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a1978d-5e20-b7d6-4e2f-f03df27e56f6, 'name': SearchDatastore_Task, 'duration_secs': 0.010473} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.886408] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-509abc7d-531a-47c2-a839-e174bcd9aca4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.899180] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 961.899180] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c65d1b-6551-6ac4-dc13-59867ae8dfcf" [ 961.899180] env[62522]: _type = "Task" [ 961.899180] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.909855] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c65d1b-6551-6ac4-dc13-59867ae8dfcf, 'name': SearchDatastore_Task, 'duration_secs': 0.010692} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.909855] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 961.909855] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72/a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 961.909855] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc7c2db2-120a-4bb4-b6fa-aa2ccc921fa0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.916206] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 961.916206] env[62522]: value = "task-2415797" [ 961.916206] env[62522]: _type = "Task" [ 961.916206] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.926332] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415797, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.935083] env[62522]: DEBUG nova.objects.instance [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lazy-loading 'migration_context' on Instance uuid 74e52638-d284-4bd1-8cff-c7aca9426f75 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 961.937269] env[62522]: DEBUG nova.compute.utils [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 961.939049] env[62522]: DEBUG nova.compute.manager [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 961.939049] env[62522]: DEBUG nova.network.neutron [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 961.991187] env[62522]: DEBUG nova.policy [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3549d85b612044969af8fda179d169ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61314d3f0b9e4c368312e714a953e549', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 961.997423] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415796, 'name': ReconfigVM_Task, 'duration_secs': 1.420766} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.998119] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Reconfigured VM instance instance-00000049 to attach disk [datastore2] 917469c5-20be-4814-814f-a042415be021/917469c5-20be-4814-814f-a042415be021.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 961.998370] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f5f1d54a-d5ab-4316-a95f-494b9d8190b1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.008641] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 962.008641] env[62522]: value = "task-2415798" [ 962.008641] env[62522]: _type = "Task" [ 962.008641] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.024023] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415798, 'name': Rename_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.032599] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: cce5f0d4-364d-4295-a27d-44ca8585f803] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 962.317436] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Releasing lock "refresh_cache-7e5fc552-748f-4569-bd61-c81a52bb46b0" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 962.318025] env[62522]: DEBUG nova.compute.manager [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Instance network_info: |[{"id": "f02543d0-0c93-4b22-b8b1-b3a08a67ba0c", "address": "fa:16:3e:3d:39:46", "network": {"id": "b837f0fb-c2e1-46dd-93b2-62d6c4352316", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1813744063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed393a0454b643eea75c203d1dfd592c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf02543d0-0c", "ovs_interfaceid": "f02543d0-0c93-4b22-b8b1-b3a08a67ba0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 962.318211] env[62522]: DEBUG oslo_concurrency.lockutils [req-5324664e-143b-4c06-b564-65bcf9422e68 req-dc6f1d7e-c320-4813-a8f1-cb41c92d33fb service nova] Acquired lock "refresh_cache-7e5fc552-748f-4569-bd61-c81a52bb46b0" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.318745] env[62522]: DEBUG nova.network.neutron [req-5324664e-143b-4c06-b564-65bcf9422e68 req-dc6f1d7e-c320-4813-a8f1-cb41c92d33fb service nova] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Refreshing network info cache for port f02543d0-0c93-4b22-b8b1-b3a08a67ba0c {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 962.319912] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:39:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '09bf081b-cdf0-4977-abe2-2339a87409ab', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f02543d0-0c93-4b22-b8b1-b3a08a67ba0c', 'vif_model': 
'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 962.332606] env[62522]: DEBUG oslo.service.loopingcall [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 962.333755] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 962.334010] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-50cffb15-61ae-40a7-bf48-e7742a9ed796 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.354941] env[62522]: DEBUG oslo_concurrency.lockutils [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Acquiring lock "ec2d78cf-15f9-441b-9800-8fcc513f7774" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.355054] env[62522]: DEBUG oslo_concurrency.lockutils [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Lock "ec2d78cf-15f9-441b-9800-8fcc513f7774" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.360594] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 962.360594] env[62522]: value = "task-2415799" [ 962.360594] env[62522]: _type = "Task" [ 962.360594] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.372469] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415799, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.429054] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415797, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48428} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.429387] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72/a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 962.429636] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 962.429951] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0b0ddd3a-c9f3-4ba7-a6ab-e1651a22977e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.438878] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 962.438878] env[62522]: value = "task-2415800" [ 962.438878] env[62522]: _type = "Task" [ 962.438878] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.448276] env[62522]: DEBUG nova.compute.manager [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 962.452788] env[62522]: DEBUG nova.network.neutron [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Successfully created port: 5ffc472e-5334-485a-9155-fed81971c096 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 962.462688] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415800, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.525884] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415798, 'name': Rename_Task, 'duration_secs': 0.20237} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.525884] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 962.528182] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a97835e3-b2ff-4f63-8ac0-bf1807420c3e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.539090] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 17e1557d-e4cf-45b0-84da-4cbcffe31fb6] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 962.541124] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 962.541124] env[62522]: value = "task-2415801" [ 962.541124] env[62522]: _type = "Task" [ 962.541124] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.554586] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415801, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.857675] env[62522]: DEBUG nova.compute.manager [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 962.880014] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415799, 'name': CreateVM_Task, 'duration_secs': 0.378365} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.880141] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 962.881231] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.881231] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.881351] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 962.881582] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3da2b1d9-a222-4041-a12c-a42797590c8a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.892963] env[62522]: DEBUG oslo_vmware.api [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 962.892963] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52426811-174b-b7ed-4427-614379b6c0d9" [ 962.892963] env[62522]: _type = "Task" [ 962.892963] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.905372] env[62522]: DEBUG oslo_vmware.api [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52426811-174b-b7ed-4427-614379b6c0d9, 'name': SearchDatastore_Task, 'duration_secs': 0.010342} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.906481] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 962.906724] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 962.906952] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.908358] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.908586] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 962.909142] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fba0b648-bfa5-45cf-ac16-c45d1fe1f377 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.919281] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 962.919281] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 962.922983] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1457e7ff-6dd7-457c-8058-4cb1da3f5086 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.930426] env[62522]: DEBUG oslo_vmware.api [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 962.930426] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5289d6af-4bf1-cb9d-04b0-5e03c6170f6f" [ 962.930426] env[62522]: _type = "Task" [ 962.930426] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.941476] env[62522]: DEBUG oslo_vmware.api [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5289d6af-4bf1-cb9d-04b0-5e03c6170f6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.952514] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415800, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074724} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.955148] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 962.956282] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb076ed9-5be6-4837-a0f1-48449a78fa24 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.989624] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72/a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 962.992809] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0612b2de-87e4-4ebb-980d-1e4e12b6c4fd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.010152] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c39da637-408f-464d-95bf-ebfa158ff1fa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.020207] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6073114b-ab59-4ef3-ba39-d4988074a398 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.023981] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 963.023981] env[62522]: value = "task-2415802" [ 963.023981] env[62522]: _type = "Task" [ 963.023981] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.053074] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 253a2903-2601-4f0a-8882-e7510406f9d5] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 963.061129] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25dce18d-4ecb-44e4-9c5c-d98361ad2538 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.068059] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415802, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.075933] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415801, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.077421] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50fa2d05-7ba9-422d-8ca8-4c5310af15f0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.093165] env[62522]: DEBUG nova.compute.provider_tree [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 963.147853] env[62522]: DEBUG nova.network.neutron [req-5324664e-143b-4c06-b564-65bcf9422e68 req-dc6f1d7e-c320-4813-a8f1-cb41c92d33fb service nova] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Updated VIF entry in instance network info cache for port f02543d0-0c93-4b22-b8b1-b3a08a67ba0c. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 963.148241] env[62522]: DEBUG nova.network.neutron [req-5324664e-143b-4c06-b564-65bcf9422e68 req-dc6f1d7e-c320-4813-a8f1-cb41c92d33fb service nova] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Updating instance_info_cache with network_info: [{"id": "f02543d0-0c93-4b22-b8b1-b3a08a67ba0c", "address": "fa:16:3e:3d:39:46", "network": {"id": "b837f0fb-c2e1-46dd-93b2-62d6c4352316", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1813744063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed393a0454b643eea75c203d1dfd592c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf02543d0-0c", "ovs_interfaceid": "f02543d0-0c93-4b22-b8b1-b3a08a67ba0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.391400] env[62522]: DEBUG oslo_concurrency.lockutils [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.442880] env[62522]: DEBUG oslo_vmware.api [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5289d6af-4bf1-cb9d-04b0-5e03c6170f6f, 'name': SearchDatastore_Task, 'duration_secs': 0.017517} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.443425] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3bb2aaf-a14a-4c70-84ce-c6b0e16c7fa2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.449439] env[62522]: DEBUG oslo_vmware.api [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 963.449439] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5217b88d-7f22-b7d6-1b4b-a9fce764240a" [ 963.449439] env[62522]: _type = "Task" [ 963.449439] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.458071] env[62522]: DEBUG oslo_vmware.api [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5217b88d-7f22-b7d6-1b4b-a9fce764240a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.465363] env[62522]: DEBUG nova.compute.manager [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 963.492900] env[62522]: DEBUG nova.virt.hardware [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 963.493176] env[62522]: DEBUG nova.virt.hardware [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 963.493349] env[62522]: DEBUG nova.virt.hardware [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 963.493529] env[62522]: DEBUG nova.virt.hardware [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 963.493673] env[62522]: DEBUG nova.virt.hardware [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 963.493821] env[62522]: DEBUG nova.virt.hardware [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 963.494066] env[62522]: DEBUG nova.virt.hardware [None 
req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 963.494235] env[62522]: DEBUG nova.virt.hardware [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 963.494408] env[62522]: DEBUG nova.virt.hardware [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 963.494568] env[62522]: DEBUG nova.virt.hardware [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 963.494741] env[62522]: DEBUG nova.virt.hardware [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 963.495638] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5c3c562-5f07-4c35-ab04-109aa404b362 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.504091] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19e3666-dc7e-4e9d-ba53-1a58ad2910ca {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.533220] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415802, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.563253] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415801, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.564794] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 19d3d54c-5ba1-420f-b012-a08add8546c9] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 963.596575] env[62522]: DEBUG nova.scheduler.client.report [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 963.651138] env[62522]: DEBUG oslo_concurrency.lockutils [req-5324664e-143b-4c06-b564-65bcf9422e68 req-dc6f1d7e-c320-4813-a8f1-cb41c92d33fb service nova] Releasing lock "refresh_cache-7e5fc552-748f-4569-bd61-c81a52bb46b0" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 963.962621] env[62522]: DEBUG oslo_vmware.api [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5217b88d-7f22-b7d6-1b4b-a9fce764240a, 'name': SearchDatastore_Task, 'duration_secs': 0.014804} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.963019] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 963.963443] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 7e5fc552-748f-4569-bd61-c81a52bb46b0/7e5fc552-748f-4569-bd61-c81a52bb46b0.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 963.963820] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-01852c4b-0dc9-4c7d-bca0-be07a1965cee {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.971649] env[62522]: DEBUG oslo_vmware.api [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 963.971649] env[62522]: value = "task-2415803" [ 963.971649] env[62522]: _type = "Task" [ 963.971649] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.979846] env[62522]: DEBUG oslo_vmware.api [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415803, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.036656] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415802, 'name': ReconfigVM_Task, 'duration_secs': 0.592152} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.036942] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Reconfigured VM instance instance-0000004a to attach disk [datastore2] a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72/a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 964.037687] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-48b78169-036b-49a8-a823-2570730783b8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.048927] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 964.048927] env[62522]: value = "task-2415804" [ 964.048927] env[62522]: _type = "Task" [ 964.048927] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.063184] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415804, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.072024] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 68b4c229-0ace-486f-9a99-d3c955b7bdfb] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 964.075611] env[62522]: DEBUG oslo_vmware.api [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415801, 'name': PowerOnVM_Task, 'duration_secs': 1.171522} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.076439] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 964.076439] env[62522]: INFO nova.compute.manager [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Took 9.62 seconds to spawn the instance on the hypervisor. [ 964.076560] env[62522]: DEBUG nova.compute.manager [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 964.078156] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf1a2029-714e-4184-9f7a-b2fe4d82b0cb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.190598] env[62522]: DEBUG nova.compute.manager [req-3f86cd94-8ca8-4b18-ba6e-e25b2a1cb14d req-f99c763d-7934-48d8-b9eb-2c3f2e486313 service nova] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Received event network-vif-plugged-5ffc472e-5334-485a-9155-fed81971c096 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 964.190866] env[62522]: DEBUG oslo_concurrency.lockutils [req-3f86cd94-8ca8-4b18-ba6e-e25b2a1cb14d req-f99c763d-7934-48d8-b9eb-2c3f2e486313 service nova] Acquiring lock "e1225c6f-9025-41ff-94fa-a55af49aeed2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.191100] env[62522]: DEBUG oslo_concurrency.lockutils [req-3f86cd94-8ca8-4b18-ba6e-e25b2a1cb14d req-f99c763d-7934-48d8-b9eb-2c3f2e486313 service nova] Lock "e1225c6f-9025-41ff-94fa-a55af49aeed2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.191281] env[62522]: DEBUG oslo_concurrency.lockutils [req-3f86cd94-8ca8-4b18-ba6e-e25b2a1cb14d req-f99c763d-7934-48d8-b9eb-2c3f2e486313 service nova] Lock "e1225c6f-9025-41ff-94fa-a55af49aeed2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.191567] env[62522]: DEBUG nova.compute.manager [req-3f86cd94-8ca8-4b18-ba6e-e25b2a1cb14d req-f99c763d-7934-48d8-b9eb-2c3f2e486313 service nova] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] No waiting events found dispatching network-vif-plugged-5ffc472e-5334-485a-9155-fed81971c096 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 964.191828] env[62522]: WARNING nova.compute.manager [req-3f86cd94-8ca8-4b18-ba6e-e25b2a1cb14d req-f99c763d-7934-48d8-b9eb-2c3f2e486313 service nova] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Received unexpected 
event network-vif-plugged-5ffc472e-5334-485a-9155-fed81971c096 for instance with vm_state building and task_state spawning. [ 964.330756] env[62522]: DEBUG nova.network.neutron [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Successfully updated port: 5ffc472e-5334-485a-9155-fed81971c096 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 964.482418] env[62522]: DEBUG oslo_vmware.api [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415803, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.45709} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.482680] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 7e5fc552-748f-4569-bd61-c81a52bb46b0/7e5fc552-748f-4569-bd61-c81a52bb46b0.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 964.482889] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 964.483175] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bc945cc8-d196-4019-b11e-32b4d0345b3f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.490214] env[62522]: DEBUG oslo_vmware.api [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 964.490214] env[62522]: value = "task-2415805" [ 964.490214] env[62522]: _type = "Task" [ 964.490214] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.497787] env[62522]: DEBUG oslo_vmware.api [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415805, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.560441] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415804, 'name': Rename_Task} progress is 99%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.578167] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 6d8b5429-113b-4280-9851-bf6614dde4a7] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 964.597012] env[62522]: INFO nova.compute.manager [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Took 52.54 seconds to build instance. [ 964.608782] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.178s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.615259] env[62522]: DEBUG oslo_concurrency.lockutils [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.842s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.615465] env[62522]: DEBUG oslo_concurrency.lockutils [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.617587] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.825s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.617799] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.619655] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 24.475s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.619853] env[62522]: DEBUG nova.objects.instance [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Trying to apply a migration context 
that does not seem to be set for this instance {{(pid=62522) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 964.648060] env[62522]: INFO nova.scheduler.client.report [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Deleted allocations for instance 8b21b749-b872-43f7-a2c5-aefee6c5f3a1 [ 964.650246] env[62522]: INFO nova.scheduler.client.report [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Deleted allocations for instance ff6637e9-2a67-4302-9769-24ec045538d4 [ 964.834990] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "refresh_cache-e1225c6f-9025-41ff-94fa-a55af49aeed2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 964.835172] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquired lock "refresh_cache-e1225c6f-9025-41ff-94fa-a55af49aeed2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.835335] env[62522]: DEBUG nova.network.neutron [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 965.000070] env[62522]: DEBUG oslo_vmware.api [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415805, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074576} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.000309] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 965.001091] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8d2ae1a-a9ce-4913-8fcf-7868ace70a61 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.022722] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] 7e5fc552-748f-4569-bd61-c81a52bb46b0/7e5fc552-748f-4569-bd61-c81a52bb46b0.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 965.022958] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b1b2cc1-d2e9-4a46-ad94-4139edde61b4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.042191] env[62522]: DEBUG oslo_vmware.api [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 965.042191] env[62522]: value = "task-2415806" [ 965.042191] env[62522]: _type = "Task" [ 965.042191] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.049811] env[62522]: DEBUG oslo_vmware.api [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415806, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.058442] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415804, 'name': Rename_Task} progress is 99%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.081192] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: c73686c6-4dd8-4f00-a65a-5d8574409ad1] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 965.098054] env[62522]: DEBUG oslo_concurrency.lockutils [None req-76e83611-7e3e-4a89-8823-9e89a81fcee1 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "917469c5-20be-4814-814f-a042415be021" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.411s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.160515] env[62522]: DEBUG oslo_concurrency.lockutils [None req-424d9b81-5d17-4c85-a455-05d29bb4f4a4 tempest-ImagesOneServerNegativeTestJSON-990045195 tempest-ImagesOneServerNegativeTestJSON-990045195-project-member] Lock "8b21b749-b872-43f7-a2c5-aefee6c5f3a1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.966s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.161723] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5297a639-9995-4c62-8ce2-4dc3248e5adc tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lock "ff6637e9-2a67-4302-9769-24ec045538d4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.861s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.374047] env[62522]: DEBUG nova.network.neutron [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 965.552696] env[62522]: DEBUG oslo_vmware.api [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415806, 'name': ReconfigVM_Task, 'duration_secs': 0.462234} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.555714] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Reconfigured VM instance instance-0000004b to attach disk [datastore2] 7e5fc552-748f-4569-bd61-c81a52bb46b0/7e5fc552-748f-4569-bd61-c81a52bb46b0.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 965.556347] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-16175bea-9b88-4b8e-9ba3-d480345b6465 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.564885] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415804, 'name': Rename_Task, 'duration_secs': 1.221653} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.566512] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 965.566512] env[62522]: DEBUG oslo_vmware.api [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 965.566512] env[62522]: value = "task-2415807" [ 965.566512] env[62522]: _type = "Task" [ 965.566512] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.566853] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-11ccf286-5ca4-4b65-b091-9afed033ec23 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.579149] env[62522]: DEBUG oslo_vmware.api [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415807, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.579896] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 965.579896] env[62522]: value = "task-2415808" [ 965.579896] env[62522]: _type = "Task" [ 965.579896] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.584050] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 7d561939-df5d-4f8d-99b1-5eb5f0fdfc0a] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 965.590503] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415808, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.633936] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d1eb4ff0-149a-4eee-9fbb-9e36031dbf09 tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.637011] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.964s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.637334] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.639285] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.961s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.639493] env[62522]: DEBUG nova.objects.instance [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Lazy-loading 'resources' on Instance uuid 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 965.643511] env[62522]: DEBUG nova.network.neutron [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Updating instance_info_cache with network_info: [{"id": "5ffc472e-5334-485a-9155-fed81971c096", "address": "fa:16:3e:f5:8d:fe", "network": {"id": "d6a06fb0-929f-44b6-93c4-698be8498194", "bridge": "br-int", "label": "tempest-ImagesTestJSON-272550236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61314d3f0b9e4c368312e714a953e549", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ffc472e-53", "ovs_interfaceid": "5ffc472e-5334-485a-9155-fed81971c096", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.678042] env[62522]: INFO nova.scheduler.client.report [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Deleted allocations for instance 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c [ 965.823258] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Acquiring lock "6ef27aee-719c-4089-825d-fc117e867bde" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.823258] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lock "6ef27aee-719c-4089-825d-fc117e867bde" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.823258] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Acquiring lock "6ef27aee-719c-4089-825d-fc117e867bde-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.823258] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lock "6ef27aee-719c-4089-825d-fc117e867bde-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.823258] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lock "6ef27aee-719c-4089-825d-fc117e867bde-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.824235] env[62522]: INFO nova.compute.manager [None req-b1557125-7e00-4c46-b358-9c754ec09d87 
tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Terminating instance [ 966.078897] env[62522]: DEBUG oslo_vmware.api [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415807, 'name': Rename_Task, 'duration_secs': 0.164981} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.079199] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 966.079441] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f73f5ca6-20df-4420-bbc1-4aee5bcf6166 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.089748] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415808, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.091142] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 4de70165-c28f-44b7-a01a-caa0787170b8] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 966.093473] env[62522]: DEBUG oslo_vmware.api [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 966.093473] env[62522]: value = "task-2415809" [ 966.093473] env[62522]: _type = "Task" [ 966.093473] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.102127] env[62522]: DEBUG oslo_vmware.api [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415809, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.148835] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Releasing lock "refresh_cache-e1225c6f-9025-41ff-94fa-a55af49aeed2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.149545] env[62522]: DEBUG nova.compute.manager [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Instance network_info: |[{"id": "5ffc472e-5334-485a-9155-fed81971c096", "address": "fa:16:3e:f5:8d:fe", "network": {"id": "d6a06fb0-929f-44b6-93c4-698be8498194", "bridge": "br-int", "label": "tempest-ImagesTestJSON-272550236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61314d3f0b9e4c368312e714a953e549", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ffc472e-53", "ovs_interfaceid": "5ffc472e-5334-485a-9155-fed81971c096", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 966.152766] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:8d:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '399f3826-705c-45f7-9fe0-3a08a945151a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5ffc472e-5334-485a-9155-fed81971c096', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 966.161592] env[62522]: DEBUG oslo.service.loopingcall [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 966.161592] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 966.161592] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-574dcd84-95a8-46fc-9498-aeb6b6afcb62 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.178933] env[62522]: INFO nova.compute.manager [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Swapping old allocation on dict_keys(['c7fa38b2-245d-4337-a012-22c1a01c0a72']) held by migration 8cef14b7-9f7c-4125-955c-e7a909c91b4f for instance [ 966.189125] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 966.189125] env[62522]: value = "task-2415810" [ 966.189125] env[62522]: _type = "Task" [ 966.189125] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.189656] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a93cef8-3523-470a-a539-4ffec97fabea tempest-ServerMetadataNegativeTestJSON-1695102246 tempest-ServerMetadataNegativeTestJSON-1695102246-project-member] Lock "0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.274s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.206350] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415810, 'name': CreateVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.214291] env[62522]: DEBUG nova.scheduler.client.report [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Overwriting current allocation {'allocations': {'c7fa38b2-245d-4337-a012-22c1a01c0a72': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 102}}, 'project_id': '91dee2b9e8bd456cbb55667383b0058d', 'user_id': '40504d2538e34ec2b02cc43b616aafbd', 'consumer_generation': 1} on consumer 74e52638-d284-4bd1-8cff-c7aca9426f75 {{(pid=62522) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 966.249367] env[62522]: DEBUG nova.compute.manager [req-e582c593-d929-49ec-b117-4cf3f40369cd req-b017bf34-eff4-4193-ad8f-ee2d28ddc33c service nova] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Received event network-changed-5ffc472e-5334-485a-9155-fed81971c096 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 966.249643] env[62522]: DEBUG nova.compute.manager [req-e582c593-d929-49ec-b117-4cf3f40369cd req-b017bf34-eff4-4193-ad8f-ee2d28ddc33c service nova] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Refreshing instance network info cache due to event network-changed-5ffc472e-5334-485a-9155-fed81971c096. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 966.249798] env[62522]: DEBUG oslo_concurrency.lockutils [req-e582c593-d929-49ec-b117-4cf3f40369cd req-b017bf34-eff4-4193-ad8f-ee2d28ddc33c service nova] Acquiring lock "refresh_cache-e1225c6f-9025-41ff-94fa-a55af49aeed2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.250037] env[62522]: DEBUG oslo_concurrency.lockutils [req-e582c593-d929-49ec-b117-4cf3f40369cd req-b017bf34-eff4-4193-ad8f-ee2d28ddc33c service nova] Acquired lock "refresh_cache-e1225c6f-9025-41ff-94fa-a55af49aeed2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.250182] env[62522]: DEBUG nova.network.neutron [req-e582c593-d929-49ec-b117-4cf3f40369cd req-b017bf34-eff4-4193-ad8f-ee2d28ddc33c service nova] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Refreshing network info cache for port 5ffc472e-5334-485a-9155-fed81971c096 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 966.328227] env[62522]: DEBUG nova.compute.manager [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 966.328529] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 966.329474] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2616ae5b-dacf-4fec-a572-a584c948dc4c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.339096] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 966.339208] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f9c431d-4846-47cb-8077-46b1159efee2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.345322] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "refresh_cache-74e52638-d284-4bd1-8cff-c7aca9426f75" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.345612] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquired lock "refresh_cache-74e52638-d284-4bd1-8cff-c7aca9426f75" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.345765] env[62522]: DEBUG nova.network.neutron [None 
req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 966.360947] env[62522]: DEBUG oslo_vmware.api [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 966.360947] env[62522]: value = "task-2415811" [ 966.360947] env[62522]: _type = "Task" [ 966.360947] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.377896] env[62522]: DEBUG oslo_vmware.api [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415811, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.602943] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 95e4fe36-6830-4fc4-bb53-1e5643c2f95b] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 966.605034] env[62522]: DEBUG oslo_vmware.api [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415808, 'name': PowerOnVM_Task, 'duration_secs': 0.991999} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.609608] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 966.610315] env[62522]: INFO nova.compute.manager [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Took 9.58 seconds to spawn the instance on the hypervisor. [ 966.610315] env[62522]: DEBUG nova.compute.manager [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 966.612544] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-573e8a98-1cf8-4a8d-b8ca-9f9c72165246 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.620635] env[62522]: DEBUG oslo_vmware.api [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415809, 'name': PowerOnVM_Task, 'duration_secs': 0.49984} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.625044] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 966.625308] env[62522]: INFO nova.compute.manager [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Took 7.00 seconds to spawn the instance on the hypervisor. [ 966.626429] env[62522]: DEBUG nova.compute.manager [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 966.630284] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71322a82-88d0-4ca5-888a-d02a4e2a762a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.700997] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf6c616-f661-4646-aa24-e23eae6d126a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.709645] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415810, 'name': CreateVM_Task, 'duration_secs': 0.416969} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.711664] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 966.712428] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.712593] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.713118] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 966.714107] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5c4b45-c262-49ce-8f43-2d0d726ff5c6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.717514] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1bdd8ba-a876-4b9f-b77c-3273515c8859 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.751537] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 966.751537] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5266939a-5cc6-57c7-d799-d64816bc12da" [ 966.751537] env[62522]: _type = "Task" [ 966.751537] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.752637] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3507d82-49f3-4429-a472-a391b0742243 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.769360] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff369c84-2181-41fd-9b7b-9a793adcbeee {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.774036] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5266939a-5cc6-57c7-d799-d64816bc12da, 'name': SearchDatastore_Task, 'duration_secs': 0.032825} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.774384] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.774624] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 966.774856] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.775044] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.775306] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 966.776284] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7eec8959-2070-4523-b1d9-84a5e2229ab0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.789295] env[62522]: DEBUG nova.compute.provider_tree [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 966.797388] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 966.797576] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 966.798350] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b039cb7-7d30-4e2f-9395-0adf69b0ec8e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.804260] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 966.804260] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522e2628-8aaa-567d-cddd-ac699ee4c5dc" [ 966.804260] env[62522]: _type = "Task" [ 966.804260] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.814655] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522e2628-8aaa-567d-cddd-ac699ee4c5dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.870687] env[62522]: DEBUG oslo_vmware.api [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415811, 'name': PowerOffVM_Task, 'duration_secs': 0.248751} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.870905] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 966.871098] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 966.871371] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0192e7a5-a2e7-4c4c-8159-2ffe51d892cd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.939667] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 966.939935] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 966.940138] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Deleting the datastore file [datastore2] 6ef27aee-719c-4089-825d-fc117e867bde {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 966.940438] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2dc4d505-113a-439e-ac72-f2ca16472f5f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.946893] env[62522]: DEBUG oslo_vmware.api [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for the task: (returnval){ [ 966.946893] env[62522]: value = "task-2415813" [ 966.946893] env[62522]: _type = "Task" [ 966.946893] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.957414] env[62522]: DEBUG oslo_vmware.api [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415813, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.111909] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: a804f755-58b2-4350-8726-4e82f60afcdc] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 967.140146] env[62522]: INFO nova.compute.manager [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Took 44.77 seconds to build instance. [ 967.147909] env[62522]: INFO nova.compute.manager [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Took 39.94 seconds to build instance. [ 967.161978] env[62522]: DEBUG nova.network.neutron [req-e582c593-d929-49ec-b117-4cf3f40369cd req-b017bf34-eff4-4193-ad8f-ee2d28ddc33c service nova] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Updated VIF entry in instance network info cache for port 5ffc472e-5334-485a-9155-fed81971c096. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 967.162396] env[62522]: DEBUG nova.network.neutron [req-e582c593-d929-49ec-b117-4cf3f40369cd req-b017bf34-eff4-4193-ad8f-ee2d28ddc33c service nova] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Updating instance_info_cache with network_info: [{"id": "5ffc472e-5334-485a-9155-fed81971c096", "address": "fa:16:3e:f5:8d:fe", "network": {"id": "d6a06fb0-929f-44b6-93c4-698be8498194", "bridge": "br-int", "label": "tempest-ImagesTestJSON-272550236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61314d3f0b9e4c368312e714a953e549", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ffc472e-53", "ovs_interfaceid": "5ffc472e-5334-485a-9155-fed81971c096", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.293280] env[62522]: DEBUG nova.scheduler.client.report [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 967.320129] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522e2628-8aaa-567d-cddd-ac699ee4c5dc, 'name': SearchDatastore_Task, 'duration_secs': 0.033641} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.321867] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6796268b-230e-441c-a2dc-7b4f0bac9aca {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.327825] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 967.327825] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d15786-ff4b-e46d-6e8c-1f3eddf2bcd6" [ 967.327825] env[62522]: _type = "Task" [ 967.327825] env[62522]: } to complete. 
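For reference, the inventory dict in the scheduler report entry above determines how much of this provider placement may allocate: usable capacity per resource class is (total - reserved) * allocation_ratio. A quick standalone recomputation of the figures logged for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 (the helper is illustrative; the formula is the standard placement capacity calculation):

    # Values copied from the inventory logged above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def capacity(inv):
        # Placement can allocate up to (total - reserved) * allocation_ratio
        # of each resource class against the provider.
        return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])

    for rc, inv in inventory.items():
        print(rc, capacity(inv))
    # VCPU 192, MEMORY_MB 196078, DISK_GB 400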
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.340619] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d15786-ff4b-e46d-6e8c-1f3eddf2bcd6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.359057] env[62522]: DEBUG nova.network.neutron [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Updating instance_info_cache with network_info: [{"id": "d830d64b-94fa-4bc8-a3e6-e45c4b0ae629", "address": "fa:16:3e:d4:80:4e", "network": {"id": "f70ccb8e-4404-4a88-b829-697b2ba58bb9", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.185", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c1482f918afe4561b7accd9759bb88ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd830d64b-94", "ovs_interfaceid": "d830d64b-94fa-4bc8-a3e6-e45c4b0ae629", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.461375] env[62522]: DEBUG oslo_vmware.api [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415813, 'name': DeleteDatastoreFile_Task} progress is 0%. 
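Each "Waiting for the task ... to complete" / "progress is N%" / "completed successfully" triplet in these entries is oslo.vmware's task-polling loop: the driver submits a vSphere task (SearchDatastore_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, ...) and then blocks in wait_for_task, which re-reads the task state on the session's poll interval until it reaches success or error. A hedged sketch of how a caller drives that loop, assuming an already-established oslo_vmware.api.VMwareAPISession named session and a task reference obtained from a prior invoke_api() call:

    from oslo_vmware import exceptions as vexc

    def run_task(session, task_ref):
        # wait_for_task() polls the task periodically (producing the
        # "progress is N%" lines above) and returns the task info once the
        # task finishes successfully.
        try:
            task_info = session.wait_for_task(task_ref)
            return task_info.result
        except vexc.VMwareDriverException:
            # Base oslo.vmware exception: the task ended in an error state;
            # callers typically log the failure and re-raise.
            raise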
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.617401] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: bf2ccaeb-610a-437b-be94-d3caefbe15c5] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 967.646556] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a8330812-0b42-46d4-8bcb-6a89d199b9f4 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.406s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.651367] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4780a7f2-6444-4034-a059-b18ac7246ec1 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "7e5fc552-748f-4569-bd61-c81a52bb46b0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.455s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.665692] env[62522]: DEBUG oslo_concurrency.lockutils [req-e582c593-d929-49ec-b117-4cf3f40369cd req-b017bf34-eff4-4193-ad8f-ee2d28ddc33c service nova] Releasing lock "refresh_cache-e1225c6f-9025-41ff-94fa-a55af49aeed2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 967.817736] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.175s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.819112] env[62522]: DEBUG oslo_concurrency.lockutils [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.838s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.820572] env[62522]: INFO nova.compute.claims [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 967.823984] env[62522]: DEBUG nova.compute.manager [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Stashing vm_state: active {{(pid=62522) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 967.842256] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d15786-ff4b-e46d-6e8c-1f3eddf2bcd6, 'name': SearchDatastore_Task, 'duration_secs': 0.025051} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.842256] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 967.842256] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] e1225c6f-9025-41ff-94fa-a55af49aeed2/e1225c6f-9025-41ff-94fa-a55af49aeed2.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 967.842256] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a719cfce-bd55-4592-b0e1-2cdef9a5eb72 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.847407] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 967.847407] env[62522]: value = "task-2415814" [ 967.847407] env[62522]: _type = "Task" [ 967.847407] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.853624] env[62522]: INFO nova.scheduler.client.report [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Deleted allocations for instance 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a [ 967.858045] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415814, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.865026] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Releasing lock "refresh_cache-74e52638-d284-4bd1-8cff-c7aca9426f75" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 967.865026] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 967.865026] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2ca816b1-1c0d-4554-90c5-d5f7d6d29099 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.869423] env[62522]: DEBUG oslo_vmware.api [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 967.869423] env[62522]: value = "task-2415815" [ 967.869423] env[62522]: _type = "Task" [ 967.869423] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.880021] env[62522]: DEBUG oslo_vmware.api [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415815, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.949787] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.950079] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.950285] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.950469] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.950864] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.961695] env[62522]: INFO nova.compute.manager [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Terminating instance [ 967.968815] env[62522]: DEBUG oslo_vmware.api [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Task: {'id': task-2415813, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.602021} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.969301] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 967.969490] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 967.969671] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 967.969833] env[62522]: INFO nova.compute.manager [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Took 1.64 seconds to destroy the instance on the hypervisor. [ 967.970084] env[62522]: DEBUG oslo.service.loopingcall [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 967.970279] env[62522]: DEBUG nova.compute.manager [-] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 967.970370] env[62522]: DEBUG nova.network.neutron [-] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 968.121029] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 84ad5317-344d-44c1-9318-fa1574321296] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 968.358242] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.364169] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415814, 'name': CopyVirtualDisk_Task} progress is 77%. 
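Taken together, the entries for instance 6ef27aee-719c-4089-825d-fc117e867bde trace the hypervisor-side teardown order: power the VM off, unregister it from vCenter, delete its directory contents from the datastore, and only then deallocate its Neutron ports (retried via a looping call). A runnable ordering-only sketch; the helpers below are hypothetical print stubs standing in for the nova.virt.vmwareapi and networking calls, not the real implementations:

    # Hypothetical stubs illustrating only the ordering seen in the log.
    def power_off_vm(instance):
        print("PowerOffVM_Task", instance)

    def unregister_vm(instance):
        print("UnregisterVM", instance)

    def delete_datastore_dir(path):
        print("DeleteDatastoreFile_Task", path)

    def deallocate_network(instance):
        print("deallocate_for_instance", instance)

    def destroy_instance(instance, ds_path):
        power_off_vm(instance)            # stop the guest first
        unregister_vm(instance)           # remove the VM from vCenter inventory
        delete_datastore_dir(ds_path)     # delete the VM files on the datastore
        deallocate_network(instance)      # release Neutron ports last

    destroy_instance("6ef27aee-719c-4089-825d-fc117e867bde",
                     "[datastore2] 6ef27aee-719c-4089-825d-fc117e867bde")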
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.368885] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e72ee44a-7689-43db-b443-9f682fe532da tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Lock "72e054d2-79bb-4ef8-82d1-4e67ba0ef20a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.054s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.381329] env[62522]: DEBUG oslo_vmware.api [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415815, 'name': PowerOffVM_Task, 'duration_secs': 0.271772} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.381590] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 968.382470] env[62522]: DEBUG nova.virt.hardware [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:21:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1bf21d87-4ee8-4637-a3ba-85267d79b549',id=39,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1714837760',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 968.386176] env[62522]: DEBUG nova.virt.hardware [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 968.386176] env[62522]: DEBUG nova.virt.hardware [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 968.386176] env[62522]: DEBUG nova.virt.hardware [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 968.386176] env[62522]: DEBUG nova.virt.hardware [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 968.386176] env[62522]: DEBUG nova.virt.hardware [None 
req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 968.386176] env[62522]: DEBUG nova.virt.hardware [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 968.386176] env[62522]: DEBUG nova.virt.hardware [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 968.386176] env[62522]: DEBUG nova.virt.hardware [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 968.386176] env[62522]: DEBUG nova.virt.hardware [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 968.386176] env[62522]: DEBUG nova.virt.hardware [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 968.392872] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-64b08508-f7db-4daf-9788-dc41193b7519 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.411979] env[62522]: DEBUG oslo_vmware.api [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 968.411979] env[62522]: value = "task-2415816" [ 968.411979] env[62522]: _type = "Task" [ 968.411979] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.426153] env[62522]: DEBUG oslo_vmware.api [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415816, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.470808] env[62522]: DEBUG nova.compute.manager [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Start destroying the instance on the hypervisor. 
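The hardware entries above walk the CPU-topology selection for this 1-vCPU flavor: with no explicit preference (0:0:0) and per-dimension maxima of 65536, the only factorization of 1 vCPU is 1 socket x 1 core x 1 thread, which is why exactly one possible topology is reported. A simplified standalone illustration of that enumeration (not Nova's nova.virt.hardware implementation):

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        # Enumerate (sockets, cores, threads) whose product equals the vCPU
        # count while respecting the per-dimension maxima.
        topos = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    topos.append((sockets, cores, threads))
        return topos

    print(possible_topologies(1, 65536, 65536, 65536))   # [(1, 1, 1)]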
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 968.470808] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 968.471988] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc5d40e-7cae-4e8d-8445-f69bdaa08645 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.480334] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 968.480538] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-64e10c87-7a8c-43af-984a-735fd50ad769 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.487786] env[62522]: DEBUG oslo_vmware.api [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 968.487786] env[62522]: value = "task-2415817" [ 968.487786] env[62522]: _type = "Task" [ 968.487786] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.498908] env[62522]: DEBUG oslo_vmware.api [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415817, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.531122] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquiring lock "7e5fc552-748f-4569-bd61-c81a52bb46b0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.531398] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "7e5fc552-748f-4569-bd61-c81a52bb46b0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.531840] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquiring lock "7e5fc552-748f-4569-bd61-c81a52bb46b0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.532069] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "7e5fc552-748f-4569-bd61-c81a52bb46b0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.532480] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "7e5fc552-748f-4569-bd61-c81a52bb46b0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.534903] env[62522]: INFO nova.compute.manager [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Terminating instance [ 968.612791] env[62522]: DEBUG nova.compute.manager [req-e68321b5-06a1-4269-9b18-405fe598c339 req-cee86a00-1fac-4f39-883a-ee039c683ac6 service nova] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Received event network-vif-deleted-9d7170cd-1d24-4b21-84d5-6f67ba579199 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 968.613043] env[62522]: INFO nova.compute.manager [req-e68321b5-06a1-4269-9b18-405fe598c339 req-cee86a00-1fac-4f39-883a-ee039c683ac6 service nova] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Neutron deleted interface 9d7170cd-1d24-4b21-84d5-6f67ba579199; detaching it from the instance and deleting it from the info cache [ 968.613418] env[62522]: DEBUG nova.network.neutron [req-e68321b5-06a1-4269-9b18-405fe598c339 req-cee86a00-1fac-4f39-883a-ee039c683ac6 service nova] [instance: 
6ef27aee-719c-4089-825d-fc117e867bde] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.624803] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 9a098809-cc26-4210-b09e-b7825c406294] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 968.649901] env[62522]: DEBUG oslo_concurrency.lockutils [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquiring lock "ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.651599] env[62522]: DEBUG oslo_concurrency.lockutils [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Lock "ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.651599] env[62522]: DEBUG oslo_concurrency.lockutils [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquiring lock "ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.651599] env[62522]: DEBUG oslo_concurrency.lockutils [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Lock "ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.651599] env[62522]: DEBUG oslo_concurrency.lockutils [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Lock "ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.654907] env[62522]: INFO nova.compute.manager [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Terminating instance [ 968.858234] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415814, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531271} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.858546] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] e1225c6f-9025-41ff-94fa-a55af49aeed2/e1225c6f-9025-41ff-94fa-a55af49aeed2.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 968.858809] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 968.859107] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-70ca18fe-63bb-46b1-93ef-0fb1cfe85793 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.866744] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 968.866744] env[62522]: value = "task-2415818" [ 968.866744] env[62522]: _type = "Task" [ 968.866744] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.874993] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415818, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.923944] env[62522]: DEBUG oslo_vmware.api [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415816, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.966575] env[62522]: DEBUG nova.network.neutron [-] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.999413] env[62522]: DEBUG oslo_vmware.api [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415817, 'name': PowerOffVM_Task, 'duration_secs': 0.221083} completed successfully. 
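The copy-then-extend sequence above is the tail of the image-backed root-disk setup: the cached image VMDK is copied into the instance directory, and the root disk is then grown to the flavor's root_gb, 1048576 KB being exactly 1 GiB for root_gb=1. A hedged sketch of the extend step, assuming an established VMwareAPISession named session and a datacenter managed-object reference dc_ref; the parameters follow the vSphere VirtualDiskManager.ExtendVirtualDisk_Task API, but treat this as illustrative rather than Nova's exact code path:

    def extend_virtual_disk(session, dc_ref, vmdk_path, size_gb):
        disk_mgr = session.vim.service_content.virtualDiskManager
        # Submit the extend task against the disk manager, then block in the
        # same wait_for_task() polling loop seen throughout this log.
        task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task',
                                  disk_mgr,
                                  name=vmdk_path,
                                  datacenter=dc_ref,
                                  newCapacityKb=size_gb * 1024 * 1024,
                                  eagerZero=False)
        session.wait_for_task(task)

    # e.g. extend_virtual_disk(session, dc_ref,
    #     '[datastore2] e1225c6f-9025-41ff-94fa-a55af49aeed2/'
    #     'e1225c6f-9025-41ff-94fa-a55af49aeed2.vmdk', 1)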
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.999698] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 968.999867] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 969.000642] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f5f65bd0-e705-4c23-b84e-ab3ba35c1e23 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.041402] env[62522]: DEBUG nova.compute.manager [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 969.041766] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 969.042699] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0284e0b0-72f7-4a70-8aaf-d78802180e68 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.051769] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 969.052132] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a7dcf167-8d81-4972-9dcb-e5e5a1a38389 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.059069] env[62522]: DEBUG oslo_vmware.api [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 969.059069] env[62522]: value = "task-2415820" [ 969.059069] env[62522]: _type = "Task" [ 969.059069] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.072234] env[62522]: DEBUG oslo_vmware.api [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415820, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.091300] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 969.091928] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 969.091928] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Deleting the datastore file [datastore2] a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 969.092763] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2e0affb9-ebc9-4fc3-a3bc-669cc95c267c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.107784] env[62522]: DEBUG oslo_vmware.api [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 969.107784] env[62522]: value = "task-2415821" [ 969.107784] env[62522]: _type = "Task" [ 969.107784] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.119994] env[62522]: DEBUG oslo_vmware.api [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415821, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.122857] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2952bed2-4851-432a-ae1e-5d0aec49b00f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.131387] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e73473-a280-4ebd-aaad-611884f89b80 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.145815] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: a5657a70-5374-4d52-be9a-2d05f9556d16] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 969.161372] env[62522]: DEBUG oslo_concurrency.lockutils [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquiring lock "refresh_cache-ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 969.161663] env[62522]: DEBUG oslo_concurrency.lockutils [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquired lock "refresh_cache-ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.161946] env[62522]: DEBUG nova.network.neutron [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 969.179094] env[62522]: DEBUG nova.compute.manager [req-e68321b5-06a1-4269-9b18-405fe598c339 req-cee86a00-1fac-4f39-883a-ee039c683ac6 service nova] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Detach interface failed, port_id=9d7170cd-1d24-4b21-84d5-6f67ba579199, reason: Instance 6ef27aee-719c-4089-825d-fc117e867bde could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 969.201463] env[62522]: DEBUG nova.network.neutron [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 969.261293] env[62522]: DEBUG nova.network.neutron [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.345793] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb377ce4-ea53-4f35-8461-b89d9844c0bf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.353557] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efec9617-205f-45d3-ab01-c22675429285 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.386458] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d58d2e-471b-4332-ae89-ecf77b0139bd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.397918] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-250cca30-b4d7-410a-985e-5ad8f8037fb6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.401728] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415818, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070186} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.402035] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 969.403165] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa82fe54-b3df-4bc0-b3a4-b98b57e59379 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.413889] env[62522]: DEBUG nova.compute.provider_tree [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 969.436148] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] e1225c6f-9025-41ff-94fa-a55af49aeed2/e1225c6f-9025-41ff-94fa-a55af49aeed2.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 969.437571] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f96d3fb5-647a-44c5-942a-fed4921257b3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.456184] env[62522]: DEBUG oslo_vmware.api [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415816, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.462110] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 969.462110] env[62522]: value = "task-2415822" [ 969.462110] env[62522]: _type = "Task" [ 969.462110] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.470280] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415822, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.471959] env[62522]: INFO nova.compute.manager [-] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Took 1.50 seconds to deallocate network for instance. [ 969.576247] env[62522]: DEBUG oslo_vmware.api [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415820, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.618176] env[62522]: DEBUG oslo_vmware.api [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415821, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.417746} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.618454] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 969.618666] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 969.619366] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 969.619366] env[62522]: INFO nova.compute.manager [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Took 1.15 seconds to destroy the instance on the hypervisor. [ 969.619366] env[62522]: DEBUG oslo.service.loopingcall [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 969.619520] env[62522]: DEBUG nova.compute.manager [-] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 969.619560] env[62522]: DEBUG nova.network.neutron [-] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 969.649236] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 678b6b5f-b410-4c55-872e-4a74da6d7ebc] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 969.765403] env[62522]: DEBUG oslo_concurrency.lockutils [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Releasing lock "refresh_cache-ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 969.765403] env[62522]: DEBUG nova.compute.manager [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 969.765403] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 969.765403] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68278c88-6d6a-48b5-81c2-213f7ae9b757 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.777028] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 969.777028] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30317c1a-17ae-45b2-bd48-299e42e9dcb8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.781783] env[62522]: DEBUG oslo_vmware.api [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 969.781783] env[62522]: value = "task-2415823" [ 969.781783] env[62522]: _type = "Task" [ 969.781783] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.791073] env[62522]: DEBUG oslo_vmware.api [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415823, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.920545] env[62522]: DEBUG nova.scheduler.client.report [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 969.928450] env[62522]: DEBUG oslo_vmware.api [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415816, 'name': ReconfigVM_Task, 'duration_secs': 1.174666} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.929625] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b25361-409d-4733-b53c-564f49da1f00 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.952118] env[62522]: DEBUG nova.virt.hardware [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:21:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1bf21d87-4ee8-4637-a3ba-85267d79b549',id=39,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1714837760',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 969.952376] env[62522]: DEBUG nova.virt.hardware [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 969.952533] env[62522]: DEBUG nova.virt.hardware [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 969.952761] env[62522]: DEBUG nova.virt.hardware [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 969.952969] env[62522]: DEBUG nova.virt.hardware [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 
tempest-MigrationsAdminTest-1135262744-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 969.953145] env[62522]: DEBUG nova.virt.hardware [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 969.953352] env[62522]: DEBUG nova.virt.hardware [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 969.953508] env[62522]: DEBUG nova.virt.hardware [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 969.954758] env[62522]: DEBUG nova.virt.hardware [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 969.954992] env[62522]: DEBUG nova.virt.hardware [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 969.955199] env[62522]: DEBUG nova.virt.hardware [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 969.956443] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5f817b3-6ffe-4a07-b972-3e3d5e2dc770 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.962875] env[62522]: DEBUG oslo_vmware.api [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 969.962875] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e8da20-47a2-df37-4f58-f85626b2e646" [ 969.962875] env[62522]: _type = "Task" [ 969.962875] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.974126] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415822, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.978408] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.978673] env[62522]: DEBUG oslo_vmware.api [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e8da20-47a2-df37-4f58-f85626b2e646, 'name': SearchDatastore_Task, 'duration_secs': 0.007386} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.983867] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Reconfiguring VM instance instance-00000037 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 969.984123] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57f938f3-a21b-4a36-abb4-5b5b8b295a05 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.004330] env[62522]: DEBUG oslo_vmware.api [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 970.004330] env[62522]: value = "task-2415824" [ 970.004330] env[62522]: _type = "Task" [ 970.004330] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.011924] env[62522]: DEBUG oslo_vmware.api [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415824, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.070904] env[62522]: DEBUG oslo_vmware.api [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415820, 'name': PowerOffVM_Task, 'duration_secs': 0.793416} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.071236] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 970.071474] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 970.071805] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b06a6b77-9917-441a-92e3-ed69859df2fd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.131479] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 970.132371] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 970.132588] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Deleting the datastore file [datastore2] 7e5fc552-748f-4569-bd61-c81a52bb46b0 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 970.132905] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8baf8439-c782-4231-aaa6-6c0ff50d9dd0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.139176] env[62522]: DEBUG oslo_vmware.api [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 970.139176] env[62522]: value = "task-2415826" [ 970.139176] env[62522]: _type = "Task" [ 970.139176] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.149369] env[62522]: DEBUG oslo_vmware.api [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415826, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.152977] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 3824a70e-8498-410a-904d-c7cd0de0c358] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 970.293191] env[62522]: DEBUG oslo_vmware.api [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415823, 'name': PowerOffVM_Task, 'duration_secs': 0.158911} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.293499] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 970.293697] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 970.293965] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3367282a-ce51-4f57-a05a-3228598a4240 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.317239] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 970.317522] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 970.317632] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Deleting the datastore file [datastore1] ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 970.317887] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bac814b4-1a57-4a98-89ec-0e5c51674c5e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.324151] env[62522]: DEBUG oslo_vmware.api [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for the task: (returnval){ [ 970.324151] env[62522]: value = "task-2415828" [ 970.324151] env[62522]: _type = "Task" [ 970.324151] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.331286] env[62522]: DEBUG oslo_vmware.api [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415828, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.397801] env[62522]: DEBUG nova.network.neutron [-] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.429857] env[62522]: DEBUG oslo_concurrency.lockutils [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.611s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.430408] env[62522]: DEBUG nova.compute.manager [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 970.433477] env[62522]: DEBUG oslo_concurrency.lockutils [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.228s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.434110] env[62522]: DEBUG nova.objects.instance [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Lazy-loading 'resources' on Instance uuid 17ec01e7-9735-4771-a73c-c4c7634d59f1 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 970.472428] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415822, 'name': ReconfigVM_Task, 'duration_secs': 0.721708} completed successfully. 
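The repeated "Waiting for the task: (returnval){ value = "task-..." } to complete" / "progress is N%" / "completed successfully" entries above all follow one poll-until-done pattern around vCenter tasks. A minimal sketch of that pattern is below; FakeTask and the poll interval are hypothetical stand-ins for illustration, not oslo.vmware's actual implementation.

import time


class FakeTask:
    """Stand-in for a vCenter task handle; reports progress, then success."""

    def __init__(self, name, steps=3):
        self.name = name
        self._steps = steps
        self._polls = 0

    def poll(self):
        self._polls += 1
        if self._polls >= self._steps:
            return {"state": "success", "progress": 100}
        return {"state": "running",
                "progress": int(100 * self._polls / self._steps)}


def wait_for_task(task, interval=0.5):
    """Poll until the task finishes, echoing progress like the log above."""
    start = time.monotonic()
    while True:
        info = task.poll()
        if info["state"] == "success":
            duration = time.monotonic() - start
            print(f"Task {task.name} completed successfully "
                  f"(duration_secs={duration:.3f})")
            return info
        print(f"Task {task.name} progress is {info['progress']}%.")
        time.sleep(interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("task-example", steps=4), interval=0.1)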
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.472783] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Reconfigured VM instance instance-0000004c to attach disk [datastore2] e1225c6f-9025-41ff-94fa-a55af49aeed2/e1225c6f-9025-41ff-94fa-a55af49aeed2.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 970.473764] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a1824f16-75ef-4b18-83ac-b4c4140e188d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.480401] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 970.480401] env[62522]: value = "task-2415829" [ 970.480401] env[62522]: _type = "Task" [ 970.480401] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.488833] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415829, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.514602] env[62522]: DEBUG oslo_vmware.api [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415824, 'name': ReconfigVM_Task, 'duration_secs': 0.178111} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.514959] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Reconfigured VM instance instance-00000037 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 970.515792] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae2ef21-08ca-4c4c-9b05-302aa0e71f09 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.544993] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] 74e52638-d284-4bd1-8cff-c7aca9426f75/74e52638-d284-4bd1-8cff-c7aca9426f75.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 970.545351] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb3644b0-b47d-43a6-b482-90207a9702f4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.576127] env[62522]: DEBUG oslo_vmware.api [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 970.576127] env[62522]: value = "task-2415830" [ 970.576127] env[62522]: _type = "Task" [ 970.576127] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.585254] env[62522]: DEBUG oslo_vmware.api [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415830, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.648930] env[62522]: DEBUG oslo_vmware.api [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415826, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157839} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.649276] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 970.649531] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 970.650148] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 970.650148] env[62522]: INFO nova.compute.manager [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Took 1.61 seconds to destroy the instance on the hypervisor. [ 970.650148] env[62522]: DEBUG oslo.service.loopingcall [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 970.650309] env[62522]: DEBUG nova.compute.manager [-] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 970.650399] env[62522]: DEBUG nova.network.neutron [-] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 970.659742] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 758ed671-347a-4949-9842-2f8cdcd261ae] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 970.842624] env[62522]: DEBUG oslo_vmware.api [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Task: {'id': task-2415828, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.089409} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.842995] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 970.843221] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 970.843402] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 970.843609] env[62522]: INFO nova.compute.manager [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Took 1.08 seconds to destroy the instance on the hypervisor. [ 970.843817] env[62522]: DEBUG oslo.service.loopingcall [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 970.844043] env[62522]: DEBUG nova.compute.manager [-] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 970.844145] env[62522]: DEBUG nova.network.neutron [-] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 970.870536] env[62522]: DEBUG nova.network.neutron [-] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 970.877029] env[62522]: DEBUG nova.compute.manager [req-cdcac0b1-53ad-4c97-8a4f-1e92313c31c9 req-79bee2b0-4fd4-4d78-baf9-2e10a6ed604e service nova] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Received event network-vif-deleted-2692fd0a-af0d-49dd-818a-fe819914252b {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 970.900758] env[62522]: INFO nova.compute.manager [-] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Took 1.28 seconds to deallocate network for instance. 
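The lockutils entries in this stretch ("Acquiring lock ... by ...", "acquired ... :: waited N s", ""released" ... :: held N s") reflect a lock wrapper that times both the wait for acquisition and the hold period. A simplified sketch of that shape, using a plain threading.Lock and a made-up owner string rather than oslo.concurrency itself:

import threading
import time
from contextlib import contextmanager


@contextmanager
def timed_lock(lock, name, owner):
    # Mirror the three-phase message shape seen in the log above.
    print(f'Acquiring lock "{name}" by "{owner}"')
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')


if __name__ == "__main__":
    compute_resources = threading.Lock()
    with timed_lock(compute_resources, "compute_resources",
                    "ResourceTracker.update_usage"):
        time.sleep(0.05)  # simulate work done while the lock is held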
[ 970.939756] env[62522]: DEBUG nova.compute.utils [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 970.941245] env[62522]: DEBUG nova.compute.manager [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 970.941388] env[62522]: DEBUG nova.network.neutron [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 970.992969] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415829, 'name': Rename_Task, 'duration_secs': 0.215517} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.995772] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 970.996578] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-00c619c2-dc61-4240-9389-fe717fe74663 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.005242] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 971.005242] env[62522]: value = "task-2415831" [ 971.005242] env[62522]: _type = "Task" [ 971.005242] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.014308] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415831, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.030980] env[62522]: DEBUG nova.policy [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c7a901dd2575462f9369f3d8819fb86d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82346c440c3343a0a5c233a48203a13c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 971.092989] env[62522]: DEBUG oslo_vmware.api [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415830, 'name': ReconfigVM_Task, 'duration_secs': 0.28607} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.093276] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Reconfigured VM instance instance-00000037 to attach disk [datastore2] 74e52638-d284-4bd1-8cff-c7aca9426f75/74e52638-d284-4bd1-8cff-c7aca9426f75.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 971.094580] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b6af1b5-0abb-4bd0-8fb9-b6c40636be60 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.124544] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e47215-8c19-4d71-9d5c-e892f92e987f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.146806] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-543a3362-77ba-43c6-aa70-046fed849ca9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.171143] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 74b6ae10-a595-4139-8eda-38fe1aa298cf] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 971.176340] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ae4b10-728e-4430-b9f9-0a2d58934210 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.183315] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 971.183656] env[62522]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-773bc647-60dc-4c63-aecd-747df80e8e5b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.190273] env[62522]: DEBUG oslo_vmware.api [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 971.190273] env[62522]: value = "task-2415832" [ 971.190273] env[62522]: _type = "Task" [ 971.190273] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.202917] env[62522]: DEBUG oslo_vmware.api [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415832, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.372938] env[62522]: DEBUG nova.network.neutron [-] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.403395] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e13fbeb9-7848-4962-89cd-875e2ad8935a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.412552] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.415743] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7313295b-836a-478e-9c19-47c7050c1dbd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.451724] env[62522]: DEBUG nova.compute.manager [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 971.454668] env[62522]: DEBUG nova.network.neutron [-] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.457524] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b81685-78ce-4103-b71f-3a387f42ad46 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.469008] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d5294cf-7c36-45db-962b-c0ce233822eb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.486521] env[62522]: DEBUG nova.compute.provider_tree [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 971.504984] env[62522]: DEBUG nova.network.neutron [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Successfully created port: 661819ce-17f6-47b5-a704-1c8c43e50373 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 971.519792] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415831, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.681742] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 7828f9c8-fc02-4218-ba93-5362af807dad] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 971.702022] env[62522]: DEBUG oslo_vmware.api [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415832, 'name': PowerOnVM_Task, 'duration_secs': 0.419969} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.702388] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 971.874447] env[62522]: INFO nova.compute.manager [-] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Took 1.03 seconds to deallocate network for instance. [ 971.964409] env[62522]: INFO nova.compute.manager [-] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Took 1.31 seconds to deallocate network for instance. 
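The "Inventory has not changed for provider ... based on inventory data: {...}" entries amount to comparing a locally cached inventory dict with freshly computed data before deciding whether to push an update to Placement. A rough sketch of that comparison follows; the cache structure and function name are assumptions for illustration, not the scheduler report client's real code.

cached_inventories = {
    "c7fa38b2-245d-4337-a012-22c1a01c0a72": {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                      "max_unit": 65530, "step_size": 1,
                      "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1,
                    "max_unit": 148, "step_size": 1,
                    "allocation_ratio": 1.0},
    }
}


def set_inventory_for_provider(provider_uuid, new_inventory):
    """Skip the Placement update when nothing has changed."""
    if cached_inventories.get(provider_uuid) == new_inventory:
        print(f"Inventory has not changed for provider {provider_uuid}")
        return False
    cached_inventories[provider_uuid] = new_inventory
    print(f"Updating inventory for provider {provider_uuid}")
    return True  # a real client would PUT the new inventory to Placement here


if __name__ == "__main__":
    same = dict(cached_inventories["c7fa38b2-245d-4337-a012-22c1a01c0a72"])
    set_inventory_for_provider("c7fa38b2-245d-4337-a012-22c1a01c0a72", same)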
[ 971.989426] env[62522]: DEBUG nova.scheduler.client.report [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 972.017160] env[62522]: DEBUG oslo_vmware.api [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415831, 'name': PowerOnVM_Task, 'duration_secs': 0.6725} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.017428] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 972.017624] env[62522]: INFO nova.compute.manager [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Took 8.55 seconds to spawn the instance on the hypervisor. [ 972.017802] env[62522]: DEBUG nova.compute.manager [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 972.018576] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c49f79-2321-44b6-95c5-c9354238913d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.184113] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: a3830103-2dcb-40ac-8e62-b331fe4673ff] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 972.381039] env[62522]: DEBUG oslo_concurrency.lockutils [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.466884] env[62522]: DEBUG nova.compute.manager [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 972.470875] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.493254] env[62522]: DEBUG nova.virt.hardware [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 972.493503] env[62522]: DEBUG nova.virt.hardware [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 972.493660] env[62522]: DEBUG nova.virt.hardware [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 972.493844] env[62522]: DEBUG nova.virt.hardware [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 972.494012] env[62522]: DEBUG nova.virt.hardware [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 972.494193] env[62522]: DEBUG nova.virt.hardware [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 972.494409] env[62522]: DEBUG nova.virt.hardware [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 972.494570] env[62522]: DEBUG nova.virt.hardware [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 972.494736] env[62522]: DEBUG nova.virt.hardware [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 972.494898] env[62522]: DEBUG nova.virt.hardware [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 972.495085] env[62522]: DEBUG nova.virt.hardware [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 972.495942] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ff4cd7-0e9f-4a8a-bbe5-18caf4d35b43 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.499879] env[62522]: DEBUG oslo_concurrency.lockutils [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.066s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.501536] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.084s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.501742] env[62522]: DEBUG nova.objects.instance [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lazy-loading 'resources' on Instance uuid e813e7da-fd2c-4f10-b2f3-1e2b5c153a19 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 972.508615] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-257027c8-3644-4fbb-9059-b2f58c10b1c0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.525107] env[62522]: INFO nova.scheduler.client.report [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Deleted allocations for instance 17ec01e7-9735-4771-a73c-c4c7634d59f1 [ 972.535988] env[62522]: INFO nova.compute.manager [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 
tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Took 41.86 seconds to build instance. [ 972.686591] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: c8779822-1694-463e-bd06-5f84d867d1bd] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 972.712941] env[62522]: INFO nova.compute.manager [None req-9e35ae3a-b680-4091-b677-d273f29e8418 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Updating instance to original state: 'active' [ 972.903218] env[62522]: DEBUG nova.compute.manager [req-9bbe55a5-7b10-4e2d-964f-b4f79f68c8b1 req-b7ccfaac-75a2-43d3-bc66-0a7afdb58ae0 service nova] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Received event network-vif-deleted-f02543d0-0c93-4b22-b8b1-b3a08a67ba0c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 972.935929] env[62522]: DEBUG nova.compute.manager [req-b75c0c83-55d8-4a55-b3c9-45ece2bf6d8c req-dad39f1e-fcd7-40d1-ad44-b8846345f021 service nova] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Received event network-vif-plugged-661819ce-17f6-47b5-a704-1c8c43e50373 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 972.937624] env[62522]: DEBUG oslo_concurrency.lockutils [req-b75c0c83-55d8-4a55-b3c9-45ece2bf6d8c req-dad39f1e-fcd7-40d1-ad44-b8846345f021 service nova] Acquiring lock "7f8a8270-5014-446c-aa42-ea0b4079e5a9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.937624] env[62522]: DEBUG oslo_concurrency.lockutils [req-b75c0c83-55d8-4a55-b3c9-45ece2bf6d8c req-dad39f1e-fcd7-40d1-ad44-b8846345f021 service nova] Lock "7f8a8270-5014-446c-aa42-ea0b4079e5a9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.937624] env[62522]: DEBUG oslo_concurrency.lockutils [req-b75c0c83-55d8-4a55-b3c9-45ece2bf6d8c req-dad39f1e-fcd7-40d1-ad44-b8846345f021 service nova] Lock "7f8a8270-5014-446c-aa42-ea0b4079e5a9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.937624] env[62522]: DEBUG nova.compute.manager [req-b75c0c83-55d8-4a55-b3c9-45ece2bf6d8c req-dad39f1e-fcd7-40d1-ad44-b8846345f021 service nova] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] No waiting events found dispatching network-vif-plugged-661819ce-17f6-47b5-a704-1c8c43e50373 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 972.937624] env[62522]: WARNING nova.compute.manager [req-b75c0c83-55d8-4a55-b3c9-45ece2bf6d8c req-dad39f1e-fcd7-40d1-ad44-b8846345f021 service nova] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Received unexpected event network-vif-plugged-661819ce-17f6-47b5-a704-1c8c43e50373 for instance with vm_state building and task_state spawning. 
[ 973.036911] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2ceb3c49-18ed-41ce-979a-d641c25d86dd tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "e1225c6f-9025-41ff-94fa-a55af49aeed2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.969s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.040057] env[62522]: DEBUG oslo_concurrency.lockutils [None req-08610fde-f198-44da-bd11-572e6064573f tempest-ServersTestMultiNic-58410992 tempest-ServersTestMultiNic-58410992-project-member] Lock "17ec01e7-9735-4771-a73c-c4c7634d59f1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.669s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.121957] env[62522]: DEBUG nova.network.neutron [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Successfully updated port: 661819ce-17f6-47b5-a704-1c8c43e50373 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 973.191572] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.192202] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Cleaning up deleted instances with incomplete migration {{(pid=62522) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11783}} [ 973.359587] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a52ef26e-b2b8-47ed-ba66-92511dda3510 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.367672] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79631c55-8ebc-4117-9b03-dd4f4753a9d9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.404046] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-083f568c-388a-4eb7-9058-166110c86104 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.409616] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a914243-8098-4245-8ec9-9d18a3f3cc56 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.425769] env[62522]: DEBUG nova.compute.provider_tree [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 973.631279] env[62522]: DEBUG oslo_concurrency.lockutils [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] 
Acquiring lock "refresh_cache-7f8a8270-5014-446c-aa42-ea0b4079e5a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.631338] env[62522]: DEBUG oslo_concurrency.lockutils [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquired lock "refresh_cache-7f8a8270-5014-446c-aa42-ea0b4079e5a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.631507] env[62522]: DEBUG nova.network.neutron [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 973.694231] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.929069] env[62522]: DEBUG nova.scheduler.client.report [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 974.046299] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "74e52638-d284-4bd1-8cff-c7aca9426f75" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.046560] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "74e52638-d284-4bd1-8cff-c7aca9426f75" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.046770] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "74e52638-d284-4bd1-8cff-c7aca9426f75-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.046948] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock 
"74e52638-d284-4bd1-8cff-c7aca9426f75-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.047130] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "74e52638-d284-4bd1-8cff-c7aca9426f75-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.049361] env[62522]: INFO nova.compute.manager [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Terminating instance [ 974.161673] env[62522]: DEBUG nova.network.neutron [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 974.336583] env[62522]: DEBUG nova.network.neutron [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Updating instance_info_cache with network_info: [{"id": "661819ce-17f6-47b5-a704-1c8c43e50373", "address": "fa:16:3e:60:76:34", "network": {"id": "2a4f6f01-ad28-4f8f-b835-ea86e1791f49", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1577320995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82346c440c3343a0a5c233a48203a13c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap661819ce-17", "ovs_interfaceid": "661819ce-17f6-47b5-a704-1c8c43e50373", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.434546] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.933s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.437517] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 
tempest-ServerGroupTestJSON-896914229-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.509s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.437817] env[62522]: DEBUG nova.objects.instance [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Lazy-loading 'resources' on Instance uuid e60d5286-04dd-42bb-ae50-26b0a763d2bc {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 974.462777] env[62522]: INFO nova.scheduler.client.report [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Deleted allocations for instance e813e7da-fd2c-4f10-b2f3-1e2b5c153a19 [ 974.553538] env[62522]: DEBUG nova.compute.manager [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 974.553796] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 974.555058] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b0cf787-2afc-4e0e-9f61-88d541dbd285 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.563817] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 974.564081] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-94f6fc1d-6da0-4405-9243-e2759fc5df15 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.576632] env[62522]: DEBUG oslo_vmware.api [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 974.576632] env[62522]: value = "task-2415833" [ 974.576632] env[62522]: _type = "Task" [ 974.576632] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.585464] env[62522]: DEBUG oslo_vmware.api [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415833, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.791737] env[62522]: DEBUG nova.compute.manager [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 974.792652] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb742bd-8b5d-416a-8dea-4c67d3bfac0c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.841018] env[62522]: DEBUG oslo_concurrency.lockutils [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Releasing lock "refresh_cache-7f8a8270-5014-446c-aa42-ea0b4079e5a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.841018] env[62522]: DEBUG nova.compute.manager [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Instance network_info: |[{"id": "661819ce-17f6-47b5-a704-1c8c43e50373", "address": "fa:16:3e:60:76:34", "network": {"id": "2a4f6f01-ad28-4f8f-b835-ea86e1791f49", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1577320995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82346c440c3343a0a5c233a48203a13c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap661819ce-17", "ovs_interfaceid": "661819ce-17f6-47b5-a704-1c8c43e50373", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 974.841018] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:76:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '193994c7-8e1b-4f25-a4a4-d0563845eb28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '661819ce-17f6-47b5-a704-1c8c43e50373', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 974.848946] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Creating folder: Project (82346c440c3343a0a5c233a48203a13c). 
Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 974.849655] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3a086397-ec5e-42e2-bb24-4661ef0aaa32 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.862297] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Created folder: Project (82346c440c3343a0a5c233a48203a13c) in parent group-v489562. [ 974.862297] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Creating folder: Instances. Parent ref: group-v489767. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 974.862297] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bdebdb91-355a-40f5-b883-4088789a6b64 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.872018] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Created folder: Instances in parent group-v489767. [ 974.872018] env[62522]: DEBUG oslo.service.loopingcall [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 974.872018] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 974.872018] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b1049077-c876-428f-ac94-9ed0fcda2333 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.893454] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 974.893454] env[62522]: value = "task-2415836" [ 974.893454] env[62522]: _type = "Task" [ 974.893454] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.902508] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415836, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.973174] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cda90cc7-7cfa-4b60-8225-92155d666e51 tempest-VolumesAdminNegativeTest-703513066 tempest-VolumesAdminNegativeTest-703513066-project-member] Lock "e813e7da-fd2c-4f10-b2f3-1e2b5c153a19" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.398s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.988356] env[62522]: DEBUG nova.compute.manager [req-d7f3d3a2-0dcc-493d-a3d0-92acb1be15e5 req-9beef9e0-ab29-482e-9808-5f3adc6da7da service nova] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Received event network-changed-661819ce-17f6-47b5-a704-1c8c43e50373 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 974.988558] env[62522]: DEBUG nova.compute.manager [req-d7f3d3a2-0dcc-493d-a3d0-92acb1be15e5 req-9beef9e0-ab29-482e-9808-5f3adc6da7da service nova] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Refreshing instance network info cache due to event network-changed-661819ce-17f6-47b5-a704-1c8c43e50373. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 974.988766] env[62522]: DEBUG oslo_concurrency.lockutils [req-d7f3d3a2-0dcc-493d-a3d0-92acb1be15e5 req-9beef9e0-ab29-482e-9808-5f3adc6da7da service nova] Acquiring lock "refresh_cache-7f8a8270-5014-446c-aa42-ea0b4079e5a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.989070] env[62522]: DEBUG oslo_concurrency.lockutils [req-d7f3d3a2-0dcc-493d-a3d0-92acb1be15e5 req-9beef9e0-ab29-482e-9808-5f3adc6da7da service nova] Acquired lock "refresh_cache-7f8a8270-5014-446c-aa42-ea0b4079e5a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.989070] env[62522]: DEBUG nova.network.neutron [req-d7f3d3a2-0dcc-493d-a3d0-92acb1be15e5 req-9beef9e0-ab29-482e-9808-5f3adc6da7da service nova] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Refreshing network info cache for port 661819ce-17f6-47b5-a704-1c8c43e50373 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 975.089747] env[62522]: DEBUG oslo_vmware.api [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415833, 'name': PowerOffVM_Task, 'duration_secs': 0.191293} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.090059] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 975.090197] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 975.090452] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d64d0f3e-cba5-4416-a111-a2ad518ecbcf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.153148] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 975.153311] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 975.153497] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Deleting the datastore file [datastore2] 74e52638-d284-4bd1-8cff-c7aca9426f75 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 975.153768] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b28075f4-6a40-487f-a392-8cff389dda25 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.159846] env[62522]: DEBUG oslo_vmware.api [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 975.159846] env[62522]: value = "task-2415838" [ 975.159846] env[62522]: _type = "Task" [ 975.159846] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.170191] env[62522]: DEBUG oslo_vmware.api [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415838, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.271424] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-689a8132-d749-4b89-a60a-410d52713dbd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.279046] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb612467-4b90-4f9c-ade7-1fa3f6bcbc28 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.310664] env[62522]: INFO nova.compute.manager [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] instance snapshotting [ 975.313049] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0239bbde-6e34-46af-a8b5-887476519b27 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.316445] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ba350e-b8dc-4e74-9fba-01e0cd92eeab {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.338533] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b124a386-8531-44f9-af73-80cb28fe8b61 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.343052] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c0e3e38-e37f-47dd-80a4-555c801b4511 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.355586] env[62522]: DEBUG nova.compute.provider_tree [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 975.404353] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415836, 'name': CreateVM_Task, 'duration_secs': 0.328397} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.404527] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 975.405224] env[62522]: DEBUG oslo_concurrency.lockutils [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.405392] env[62522]: DEBUG oslo_concurrency.lockutils [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.405715] env[62522]: DEBUG oslo_concurrency.lockutils [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 975.405960] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b64a7d1-7b4f-4587-9cb7-46429699d13e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.411058] env[62522]: DEBUG oslo_vmware.api [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 975.411058] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52cbfcca-f87b-65cc-72fe-773ac301bc16" [ 975.411058] env[62522]: _type = "Task" [ 975.411058] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.419737] env[62522]: DEBUG oslo_vmware.api [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52cbfcca-f87b-65cc-72fe-773ac301bc16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.672077] env[62522]: DEBUG oslo_vmware.api [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415838, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150329} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.672345] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 975.672530] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 975.672706] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 975.672892] env[62522]: INFO nova.compute.manager [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Took 1.12 seconds to destroy the instance on the hypervisor. [ 975.673152] env[62522]: DEBUG oslo.service.loopingcall [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 975.673348] env[62522]: DEBUG nova.compute.manager [-] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 975.673444] env[62522]: DEBUG nova.network.neutron [-] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 975.867843] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Creating Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 975.868677] env[62522]: DEBUG nova.scheduler.client.report [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 975.872744] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with 
opID=oslo.vmware-c521bc7f-e093-4eec-99b5-e66143100297 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.881960] env[62522]: DEBUG oslo_vmware.api [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 975.881960] env[62522]: value = "task-2415839" [ 975.881960] env[62522]: _type = "Task" [ 975.881960] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.894550] env[62522]: DEBUG oslo_vmware.api [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415839, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.916014] env[62522]: DEBUG nova.network.neutron [req-d7f3d3a2-0dcc-493d-a3d0-92acb1be15e5 req-9beef9e0-ab29-482e-9808-5f3adc6da7da service nova] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Updated VIF entry in instance network info cache for port 661819ce-17f6-47b5-a704-1c8c43e50373. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 975.916419] env[62522]: DEBUG nova.network.neutron [req-d7f3d3a2-0dcc-493d-a3d0-92acb1be15e5 req-9beef9e0-ab29-482e-9808-5f3adc6da7da service nova] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Updating instance_info_cache with network_info: [{"id": "661819ce-17f6-47b5-a704-1c8c43e50373", "address": "fa:16:3e:60:76:34", "network": {"id": "2a4f6f01-ad28-4f8f-b835-ea86e1791f49", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1577320995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82346c440c3343a0a5c233a48203a13c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap661819ce-17", "ovs_interfaceid": "661819ce-17f6-47b5-a704-1c8c43e50373", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.926822] env[62522]: DEBUG oslo_vmware.api [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52cbfcca-f87b-65cc-72fe-773ac301bc16, 'name': SearchDatastore_Task, 'duration_secs': 0.009155} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.927135] env[62522]: DEBUG oslo_concurrency.lockutils [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.930224] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 975.930224] env[62522]: DEBUG oslo_concurrency.lockutils [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.930224] env[62522]: DEBUG oslo_concurrency.lockutils [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.930224] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 975.930224] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c859be2-8a7d-40be-831c-01b0b3e84726 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.938254] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 975.938435] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 975.939222] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a3b99f8-a8b7-4696-8c0a-847f3214fcd5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.947328] env[62522]: DEBUG oslo_vmware.api [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 975.947328] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5263d600-8823-f5c3-c0ff-d1a8a4895a14" [ 975.947328] env[62522]: _type = "Task" [ 975.947328] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.953637] env[62522]: DEBUG oslo_vmware.api [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5263d600-8823-f5c3-c0ff-d1a8a4895a14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.986509] env[62522]: DEBUG nova.compute.manager [req-1db58850-a5e3-4482-81ec-9d231859a7fe req-7f68c39e-5753-4319-b4be-89c3219b08a0 service nova] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Received event network-vif-deleted-d830d64b-94fa-4bc8-a3e6-e45c4b0ae629 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 975.986729] env[62522]: INFO nova.compute.manager [req-1db58850-a5e3-4482-81ec-9d231859a7fe req-7f68c39e-5753-4319-b4be-89c3219b08a0 service nova] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Neutron deleted interface d830d64b-94fa-4bc8-a3e6-e45c4b0ae629; detaching it from the instance and deleting it from the info cache [ 975.986928] env[62522]: DEBUG nova.network.neutron [req-1db58850-a5e3-4482-81ec-9d231859a7fe req-7f68c39e-5753-4319-b4be-89c3219b08a0 service nova] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.376322] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.939s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.378693] env[62522]: DEBUG oslo_concurrency.lockutils [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.987s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.384018] env[62522]: INFO nova.compute.claims [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 976.394746] env[62522]: DEBUG oslo_vmware.api [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415839, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.404534] env[62522]: INFO nova.scheduler.client.report [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Deleted allocations for instance e60d5286-04dd-42bb-ae50-26b0a763d2bc [ 976.419136] env[62522]: DEBUG oslo_concurrency.lockutils [req-d7f3d3a2-0dcc-493d-a3d0-92acb1be15e5 req-9beef9e0-ab29-482e-9808-5f3adc6da7da service nova] Releasing lock "refresh_cache-7f8a8270-5014-446c-aa42-ea0b4079e5a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.456738] env[62522]: DEBUG oslo_vmware.api [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5263d600-8823-f5c3-c0ff-d1a8a4895a14, 'name': SearchDatastore_Task, 'duration_secs': 0.008347} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.458101] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5ac23ba-81d1-4dfa-9ea9-18796f3f8e39 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.464252] env[62522]: DEBUG oslo_vmware.api [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 976.464252] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a1f87b-4e07-815b-da26-3d69e728ba0d" [ 976.464252] env[62522]: _type = "Task" [ 976.464252] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.468406] env[62522]: DEBUG nova.network.neutron [-] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.475151] env[62522]: DEBUG oslo_vmware.api [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a1f87b-4e07-815b-da26-3d69e728ba0d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.489375] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6243c022-ecc9-408c-acb3-ef94bbdca5db {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.498779] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a729a773-45ad-4d6f-afe9-288d3de4d2f8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.532296] env[62522]: DEBUG nova.compute.manager [req-1db58850-a5e3-4482-81ec-9d231859a7fe req-7f68c39e-5753-4319-b4be-89c3219b08a0 service nova] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Detach interface failed, port_id=d830d64b-94fa-4bc8-a3e6-e45c4b0ae629, reason: Instance 74e52638-d284-4bd1-8cff-c7aca9426f75 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 976.900896] env[62522]: DEBUG oslo_vmware.api [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415839, 'name': CreateSnapshot_Task, 'duration_secs': 0.947225} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.900896] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Created Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 976.900896] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c2d46c-a290-4e33-a026-48f890b78cd5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.918541] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0f7568cd-15c2-4cee-a575-75bcfbe5ed56 tempest-ServerGroupTestJSON-896914229 tempest-ServerGroupTestJSON-896914229-project-member] Lock "e60d5286-04dd-42bb-ae50-26b0a763d2bc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.676s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.973564] env[62522]: INFO nova.compute.manager [-] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Took 1.30 seconds to deallocate network for instance. [ 976.985599] env[62522]: DEBUG oslo_vmware.api [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a1f87b-4e07-815b-da26-3d69e728ba0d, 'name': SearchDatastore_Task, 'duration_secs': 0.009756} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.986234] env[62522]: DEBUG oslo_concurrency.lockutils [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.986813] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 7f8a8270-5014-446c-aa42-ea0b4079e5a9/7f8a8270-5014-446c-aa42-ea0b4079e5a9.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 976.987998] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4050b223-3b9f-44b0-81ff-e249648695dc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.995833] env[62522]: DEBUG oslo_vmware.api [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 976.995833] env[62522]: value = "task-2415840" [ 976.995833] env[62522]: _type = "Task" [ 976.995833] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.005710] env[62522]: DEBUG oslo_vmware.api [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2415840, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.430307] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Creating linked-clone VM from snapshot {{(pid=62522) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 977.434823] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-186304de-9a14-4b8c-9559-e3c7df3d8bcc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.448024] env[62522]: DEBUG oslo_vmware.api [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 977.448024] env[62522]: value = "task-2415841" [ 977.448024] env[62522]: _type = "Task" [ 977.448024] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.461781] env[62522]: DEBUG oslo_vmware.api [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415841, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.488457] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.509211] env[62522]: DEBUG oslo_vmware.api [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2415840, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478066} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.509860] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 7f8a8270-5014-446c-aa42-ea0b4079e5a9/7f8a8270-5014-446c-aa42-ea0b4079e5a9.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 977.509860] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 977.510030] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4e9b5891-bdb4-40bc-971e-e291fb75dbb4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.518285] env[62522]: DEBUG oslo_vmware.api [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 977.518285] env[62522]: value = "task-2415842" [ 977.518285] env[62522]: _type = "Task" [ 977.518285] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.526808] env[62522]: DEBUG oslo_vmware.api [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2415842, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.749581] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a699a4-2090-4f1e-9160-7d3e6c533d4e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.759245] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6422641f-849d-4048-a613-8246969b0006 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.797359] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55fa9f41-e468-4729-81d4-c38db3412862 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.808023] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89712a17-896b-465b-b05d-a1ef103c9f43 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.822107] env[62522]: DEBUG nova.compute.provider_tree [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 977.956862] env[62522]: DEBUG oslo_vmware.api [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415841, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.029377] env[62522]: DEBUG oslo_vmware.api [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2415842, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069503} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.029693] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 978.030503] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de8412e-6521-4aae-8459-0dd8f70a3371 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.055493] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 7f8a8270-5014-446c-aa42-ea0b4079e5a9/7f8a8270-5014-446c-aa42-ea0b4079e5a9.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 978.056184] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-862f3c04-dcff-4fb8-ba16-47e2a46a9ead {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.076343] env[62522]: DEBUG oslo_vmware.api [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 978.076343] env[62522]: value = "task-2415843" [ 978.076343] env[62522]: _type = "Task" [ 978.076343] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.086332] env[62522]: DEBUG oslo_vmware.api [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2415843, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.325416] env[62522]: DEBUG nova.scheduler.client.report [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 978.457494] env[62522]: DEBUG oslo_vmware.api [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415841, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.589197] env[62522]: DEBUG oslo_vmware.api [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2415843, 'name': ReconfigVM_Task, 'duration_secs': 0.312174} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.589899] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 7f8a8270-5014-446c-aa42-ea0b4079e5a9/7f8a8270-5014-446c-aa42-ea0b4079e5a9.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 978.590697] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1b33f6a4-54bf-466d-9869-d5cf1f7c1eae {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.601154] env[62522]: DEBUG oslo_vmware.api [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 978.601154] env[62522]: value = "task-2415844" [ 978.601154] env[62522]: _type = "Task" [ 978.601154] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.608406] env[62522]: DEBUG oslo_vmware.api [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2415844, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.836489] env[62522]: DEBUG oslo_concurrency.lockutils [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.454s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.836489] env[62522]: DEBUG nova.compute.manager [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 978.838694] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 10.481s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.958604] env[62522]: DEBUG oslo_vmware.api [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415841, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.113211] env[62522]: DEBUG oslo_vmware.api [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2415844, 'name': Rename_Task, 'duration_secs': 0.160613} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.114394] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 979.114394] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-194d730d-311e-4a45-b226-f1eef3efbd05 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.120711] env[62522]: DEBUG oslo_vmware.api [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 979.120711] env[62522]: value = "task-2415845" [ 979.120711] env[62522]: _type = "Task" [ 979.120711] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.131181] env[62522]: DEBUG oslo_vmware.api [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2415845, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.339685] env[62522]: DEBUG nova.compute.utils [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 979.341176] env[62522]: DEBUG nova.compute.manager [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 979.341336] env[62522]: DEBUG nova.network.neutron [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 979.347050] env[62522]: INFO nova.compute.claims [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 979.421034] env[62522]: DEBUG nova.policy [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '04863cd0f6ce4256a3e1d893093cf94f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '247c239f49c8441fb723ca1b25fab349', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 979.460376] env[62522]: DEBUG oslo_vmware.api [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415841, 'name': CloneVM_Task} progress is 95%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.641348] env[62522]: DEBUG oslo_vmware.api [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2415845, 'name': PowerOnVM_Task, 'duration_secs': 0.462307} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.641690] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 979.641969] env[62522]: INFO nova.compute.manager [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Took 7.17 seconds to spawn the instance on the hypervisor. 
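Editor's note: the CreateSnapshot_Task, CloneVM_Task and PowerOnVM_Task entries above all follow the same pattern of polling a vCenter task until it reports success ("progress is N%" lines, then "completed successfully" with a duration_secs). The minimal Python sketch below illustrates only that polling loop; it is not the oslo.vmware _poll_task implementation, and poll_vmware_task / get_task_info are hypothetical names introduced here for illustration.

import time

def poll_vmware_task(get_task_info, task_ref, interval=0.5):
    # get_task_info is a caller-supplied callable returning an object with
    # .state ('running'/'success'/'error'), .progress and .error, standing in
    # for a real vSphere TaskInfo lookup.
    start = time.time()
    while True:
        info = get_task_info(task_ref)
        if info.state == 'success':
            return time.time() - start        # analogous to the logged 'duration_secs'
        if info.state == 'error':
            raise RuntimeError(info.error)
        # analogous to the "progress is N%" DEBUG lines above
        print(f"Task {task_ref} progress is {getattr(info, 'progress', 0) or 0}%")
        time.sleep(interval)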
[ 979.642194] env[62522]: DEBUG nova.compute.manager [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 979.643140] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a252f9ac-6bd2-42fa-bac5-f25126d6632a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.833715] env[62522]: DEBUG nova.network.neutron [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Successfully created port: bbfabacf-12e0-47bc-9bba-9bc066142dcd {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 979.844484] env[62522]: DEBUG nova.compute.manager [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 979.853233] env[62522]: INFO nova.compute.resource_tracker [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Updating resource usage from migration e03fc413-2c2e-4a7d-9c2e-0f29ad86ac13 [ 979.961268] env[62522]: DEBUG oslo_vmware.api [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415841, 'name': CloneVM_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.169586] env[62522]: INFO nova.compute.manager [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Took 34.20 seconds to build instance. 
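Editor's note: the recurring "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" lines around the resource tracker come from oslo.concurrency's named internal locks. Below is a minimal sketch of that serialization pattern, assuming oslo.concurrency is installed; update_usage_example and its arguments are illustrative only, not Nova's ResourceTracker API.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage_example(usage_by_instance, instance_uuid, usage):
    # Only one thread at a time runs this body for the 'compute_resources'
    # lock name; time spent blocked before entry shows up as "waited" and
    # time spent inside the body as "held" in log lines like the ones above.
    usage_by_instance[instance_uuid] = usage
    return usage_by_instance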
[ 980.275991] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e85a0b4d-019c-435e-97af-4bb2f4c59864 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.284275] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a332cd-2620-458d-a3eb-a65ce29bfc0c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.322375] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f70f74-294f-4dab-a17e-f4aee5162994 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.331958] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d8e769-bd56-4c81-8ca7-d8563fc07832 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.345887] env[62522]: DEBUG nova.compute.provider_tree [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 980.462759] env[62522]: DEBUG oslo_vmware.api [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415841, 'name': CloneVM_Task, 'duration_secs': 2.594466} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.463293] env[62522]: INFO nova.virt.vmwareapi.vmops [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Created linked-clone VM from snapshot [ 980.464050] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-142c8f0c-6dce-49d6-b638-c456cd8ed62f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.472753] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Uploading image 1f48c333-83a0-4c83-a8d3-3650d8a5edd4 {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 980.503746] env[62522]: DEBUG oslo_vmware.rw_handles [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 980.503746] env[62522]: value = "vm-489771" [ 980.503746] env[62522]: _type = "VirtualMachine" [ 980.503746] env[62522]: }. 
{{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 980.504400] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-824317fb-59c4-426d-b3ec-62d1e76c06f8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.511481] env[62522]: DEBUG oslo_vmware.rw_handles [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lease: (returnval){ [ 980.511481] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52634ae3-af3d-1817-5f9d-1f451fa712eb" [ 980.511481] env[62522]: _type = "HttpNfcLease" [ 980.511481] env[62522]: } obtained for exporting VM: (result){ [ 980.511481] env[62522]: value = "vm-489771" [ 980.511481] env[62522]: _type = "VirtualMachine" [ 980.511481] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 980.511794] env[62522]: DEBUG oslo_vmware.api [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the lease: (returnval){ [ 980.511794] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52634ae3-af3d-1817-5f9d-1f451fa712eb" [ 980.511794] env[62522]: _type = "HttpNfcLease" [ 980.511794] env[62522]: } to be ready. {{(pid=62522) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 980.519416] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 980.519416] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52634ae3-af3d-1817-5f9d-1f451fa712eb" [ 980.519416] env[62522]: _type = "HttpNfcLease" [ 980.519416] env[62522]: } is initializing. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 980.671739] env[62522]: DEBUG oslo_concurrency.lockutils [None req-00083650-b042-4681-9645-f6a846b8f2a4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "7f8a8270-5014-446c-aa42-ea0b4079e5a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.715s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.849496] env[62522]: DEBUG nova.scheduler.client.report [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 980.857678] env[62522]: DEBUG nova.compute.manager [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 980.896311] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquiring lock "548364e9-b19a-4777-8e62-19b8a0594f36" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.896311] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Lock "548364e9-b19a-4777-8e62-19b8a0594f36" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.898795] env[62522]: DEBUG nova.virt.hardware [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 980.899286] env[62522]: DEBUG nova.virt.hardware [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 980.899563] env[62522]: DEBUG nova.virt.hardware [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 980.899986] env[62522]: DEBUG nova.virt.hardware [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 980.900255] env[62522]: DEBUG nova.virt.hardware [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 980.900505] env[62522]: DEBUG nova.virt.hardware [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 
tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 980.900911] env[62522]: DEBUG nova.virt.hardware [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 980.901263] env[62522]: DEBUG nova.virt.hardware [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 980.901654] env[62522]: DEBUG nova.virt.hardware [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 980.902023] env[62522]: DEBUG nova.virt.hardware [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 980.902391] env[62522]: DEBUG nova.virt.hardware [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 980.904100] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e3a830-4c98-4bd0-9441-95687847e03b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.915786] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0abca6-888a-402f-90be-7b54249d1ae8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.021151] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 981.021151] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52634ae3-af3d-1817-5f9d-1f451fa712eb" [ 981.021151] env[62522]: _type = "HttpNfcLease" [ 981.021151] env[62522]: } is ready. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 981.021477] env[62522]: DEBUG oslo_vmware.rw_handles [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 981.021477] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52634ae3-af3d-1817-5f9d-1f451fa712eb" [ 981.021477] env[62522]: _type = "HttpNfcLease" [ 981.021477] env[62522]: }. 
{{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 981.022327] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4a8d0f-f38c-40e9-96f4-191cdceaee47 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.030457] env[62522]: DEBUG oslo_vmware.rw_handles [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fa52d9-958e-5536-a893-24332fcfe39d/disk-0.vmdk from lease info. {{(pid=62522) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 981.030672] env[62522]: DEBUG oslo_vmware.rw_handles [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fa52d9-958e-5536-a893-24332fcfe39d/disk-0.vmdk for reading. {{(pid=62522) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 981.189694] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-bf5414b5-3613-49dd-a58e-d101d948a683 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.365137] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.526s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.368017] env[62522]: INFO nova.compute.manager [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Migrating [ 981.381310] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.398s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.385409] env[62522]: DEBUG nova.objects.instance [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lazy-loading 'resources' on Instance uuid 6ef27aee-719c-4089-825d-fc117e867bde {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 981.411925] env[62522]: DEBUG nova.compute.manager [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 981.684967] env[62522]: DEBUG nova.compute.manager [req-b205f72a-c76a-4781-a9a7-d47a4c6e4c48 req-3ba8c5ae-e50d-44fd-b4cc-34ba173835d6 service nova] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Received event network-vif-plugged-bbfabacf-12e0-47bc-9bba-9bc066142dcd {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 981.685809] env[62522]: DEBUG oslo_concurrency.lockutils [req-b205f72a-c76a-4781-a9a7-d47a4c6e4c48 req-3ba8c5ae-e50d-44fd-b4cc-34ba173835d6 service nova] Acquiring lock "ec2d78cf-15f9-441b-9800-8fcc513f7774-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.686807] env[62522]: DEBUG oslo_concurrency.lockutils [req-b205f72a-c76a-4781-a9a7-d47a4c6e4c48 req-3ba8c5ae-e50d-44fd-b4cc-34ba173835d6 service nova] Lock "ec2d78cf-15f9-441b-9800-8fcc513f7774-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.688196] env[62522]: DEBUG oslo_concurrency.lockutils [req-b205f72a-c76a-4781-a9a7-d47a4c6e4c48 req-3ba8c5ae-e50d-44fd-b4cc-34ba173835d6 service nova] Lock "ec2d78cf-15f9-441b-9800-8fcc513f7774-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.688196] env[62522]: DEBUG nova.compute.manager [req-b205f72a-c76a-4781-a9a7-d47a4c6e4c48 req-3ba8c5ae-e50d-44fd-b4cc-34ba173835d6 service nova] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] No waiting events found dispatching network-vif-plugged-bbfabacf-12e0-47bc-9bba-9bc066142dcd {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 981.688196] env[62522]: WARNING nova.compute.manager [req-b205f72a-c76a-4781-a9a7-d47a4c6e4c48 req-3ba8c5ae-e50d-44fd-b4cc-34ba173835d6 service nova] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Received unexpected event network-vif-plugged-bbfabacf-12e0-47bc-9bba-9bc066142dcd for instance with vm_state building and task_state spawning. 
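Editor's note: the sequence just above (Received event network-vif-plugged-..., "No waiting events found", then the WARNING about an unexpected event) is the race where Neutron's notification arrives before the compute side has registered a waiter for it. The toy model below shows only those waiter/dispatch semantics in plain Python; the InstanceEvents class and its methods are hypothetical and are not Nova's implementation.

import threading

class InstanceEvents:
    def __init__(self):
        self._waiters = {}              # event name -> threading.Event
        self._lock = threading.Lock()

    def prepare(self, name):
        # Register interest in an event before triggering the action that
        # will eventually produce it; the caller wait()s on the returned Event.
        with self._lock:
            return self._waiters.setdefault(name, threading.Event())

    def dispatch(self, name):
        # Called by the notification handler; if nobody prepared a waiter,
        # the event is reported as unexpected, mirroring the WARNING above.
        with self._lock:
            waiter = self._waiters.pop(name, None)
        if waiter is None:
            print(f"Received unexpected event {name}")
        else:
            waiter.set()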
[ 981.901461] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "refresh_cache-917469c5-20be-4814-814f-a042415be021" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.901629] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired lock "refresh_cache-917469c5-20be-4814-814f-a042415be021" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.901801] env[62522]: DEBUG nova.network.neutron [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 981.943635] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.007195] env[62522]: DEBUG nova.network.neutron [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Successfully updated port: bbfabacf-12e0-47bc-9bba-9bc066142dcd {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 982.355143] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0c99524-213f-42b0-b30d-701bd18cf6b8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.364700] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b495314-ae3b-429f-8306-7a0af59d7ac9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.415499] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f669691-174f-452d-ab3a-08459a05f0ed {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.425475] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48a9a960-c69f-4601-ae36-86959d026578 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.445296] env[62522]: DEBUG nova.compute.provider_tree [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 982.513950] env[62522]: DEBUG oslo_concurrency.lockutils [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 
tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Acquiring lock "refresh_cache-ec2d78cf-15f9-441b-9800-8fcc513f7774" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.514118] env[62522]: DEBUG oslo_concurrency.lockutils [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Acquired lock "refresh_cache-ec2d78cf-15f9-441b-9800-8fcc513f7774" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.514302] env[62522]: DEBUG nova.network.neutron [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 982.783309] env[62522]: DEBUG nova.network.neutron [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Updating instance_info_cache with network_info: [{"id": "195b1951-c091-4db1-82d8-3c20dfcaf6d1", "address": "fa:16:3e:9c:63:e3", "network": {"id": "70e81afa-0eda-49c5-b072-e79b3c287468", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1715169983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff5da278d2be4ca983424c8291beadec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap195b1951-c0", "ovs_interfaceid": "195b1951-c091-4db1-82d8-3c20dfcaf6d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.949262] env[62522]: DEBUG nova.scheduler.client.report [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 983.066761] env[62522]: DEBUG nova.network.neutron [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 
tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 983.291050] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Releasing lock "refresh_cache-917469c5-20be-4814-814f-a042415be021" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 983.458201] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.082s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.460143] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.048s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.460420] env[62522]: DEBUG nova.objects.instance [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lazy-loading 'resources' on Instance uuid a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 983.490155] env[62522]: INFO nova.scheduler.client.report [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Deleted allocations for instance 6ef27aee-719c-4089-825d-fc117e867bde [ 983.534260] env[62522]: DEBUG nova.network.neutron [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Updating instance_info_cache with network_info: [{"id": "bbfabacf-12e0-47bc-9bba-9bc066142dcd", "address": "fa:16:3e:d4:d1:85", "network": {"id": "0961cfa8-f183-49ef-86bc-0cf80444b236", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-255632366-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "247c239f49c8441fb723ca1b25fab349", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbfabacf-12", "ovs_interfaceid": "bbfabacf-12e0-47bc-9bba-9bc066142dcd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.804018] env[62522]: DEBUG nova.compute.manager [req-783afaa1-2325-4f23-a7cf-fae61e381031 req-4345844c-38ac-4945-8dc3-be6c3cff8184 service nova] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Received event network-changed-661819ce-17f6-47b5-a704-1c8c43e50373 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 983.804018] env[62522]: DEBUG nova.compute.manager [req-783afaa1-2325-4f23-a7cf-fae61e381031 req-4345844c-38ac-4945-8dc3-be6c3cff8184 service nova] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Refreshing instance network info cache due to event network-changed-661819ce-17f6-47b5-a704-1c8c43e50373. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 983.805513] env[62522]: DEBUG oslo_concurrency.lockutils [req-783afaa1-2325-4f23-a7cf-fae61e381031 req-4345844c-38ac-4945-8dc3-be6c3cff8184 service nova] Acquiring lock "refresh_cache-7f8a8270-5014-446c-aa42-ea0b4079e5a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.805644] env[62522]: DEBUG oslo_concurrency.lockutils [req-783afaa1-2325-4f23-a7cf-fae61e381031 req-4345844c-38ac-4945-8dc3-be6c3cff8184 service nova] Acquired lock "refresh_cache-7f8a8270-5014-446c-aa42-ea0b4079e5a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.805815] env[62522]: DEBUG nova.network.neutron [req-783afaa1-2325-4f23-a7cf-fae61e381031 req-4345844c-38ac-4945-8dc3-be6c3cff8184 service nova] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Refreshing network info cache for port 661819ce-17f6-47b5-a704-1c8c43e50373 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 984.001467] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b1557125-7e00-4c46-b358-9c754ec09d87 tempest-ListImageFiltersTestJSON-1588512512 tempest-ListImageFiltersTestJSON-1588512512-project-member] Lock "6ef27aee-719c-4089-825d-fc117e867bde" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.180s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.038601] env[62522]: DEBUG oslo_concurrency.lockutils [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Releasing lock "refresh_cache-ec2d78cf-15f9-441b-9800-8fcc513f7774" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.039216] env[62522]: DEBUG nova.compute.manager [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Instance network_info: |[{"id": "bbfabacf-12e0-47bc-9bba-9bc066142dcd", "address": "fa:16:3e:d4:d1:85", "network": {"id": "0961cfa8-f183-49ef-86bc-0cf80444b236", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-255632366-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "247c239f49c8441fb723ca1b25fab349", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbfabacf-12", "ovs_interfaceid": "bbfabacf-12e0-47bc-9bba-9bc066142dcd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 984.040112] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:d1:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bbfabacf-12e0-47bc-9bba-9bc066142dcd', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 984.048823] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Creating folder: Project (247c239f49c8441fb723ca1b25fab349). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 984.049758] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e4eda577-77bb-4116-ac52-923eca21e5a4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.066305] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Created folder: Project (247c239f49c8441fb723ca1b25fab349) in parent group-v489562. [ 984.066538] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Creating folder: Instances. Parent ref: group-v489772. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 984.069406] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ba0b5cf3-3509-4051-8b4d-26a23796af9f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.085440] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Created folder: Instances in parent group-v489772. 
[ 984.085741] env[62522]: DEBUG oslo.service.loopingcall [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 984.085965] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 984.086192] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-62aac575-82fe-4bf2-af97-bf683ebc0746 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.110070] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 984.110070] env[62522]: value = "task-2415849" [ 984.110070] env[62522]: _type = "Task" [ 984.110070] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.120377] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415849, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.380881] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a951af74-90be-4c42-b9b7-fe01c2155cc3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.392944] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef488cb0-82d0-4b40-be66-f2ca3c90e103 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.424667] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80c99926-ea09-4754-81ad-bad631cce1ed {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.433799] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b326c337-476d-42a7-adc4-facf2fd5d3b7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.453440] env[62522]: DEBUG nova.compute.provider_tree [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 984.623582] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415849, 'name': CreateVM_Task, 'duration_secs': 0.433783} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.623582] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 984.623803] env[62522]: DEBUG oslo_concurrency.lockutils [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.623867] env[62522]: DEBUG oslo_concurrency.lockutils [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.624465] env[62522]: DEBUG oslo_concurrency.lockutils [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 984.624766] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92b4aed6-d8c8-49f5-b15a-6f622047d1d9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.630400] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Waiting for the task: (returnval){ [ 984.630400] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a967fb-d791-6574-8a32-cbd9a8e9699a" [ 984.630400] env[62522]: _type = "Task" [ 984.630400] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.642143] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a967fb-d791-6574-8a32-cbd9a8e9699a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.655075] env[62522]: DEBUG nova.network.neutron [req-783afaa1-2325-4f23-a7cf-fae61e381031 req-4345844c-38ac-4945-8dc3-be6c3cff8184 service nova] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Updated VIF entry in instance network info cache for port 661819ce-17f6-47b5-a704-1c8c43e50373. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 984.655529] env[62522]: DEBUG nova.network.neutron [req-783afaa1-2325-4f23-a7cf-fae61e381031 req-4345844c-38ac-4945-8dc3-be6c3cff8184 service nova] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Updating instance_info_cache with network_info: [{"id": "661819ce-17f6-47b5-a704-1c8c43e50373", "address": "fa:16:3e:60:76:34", "network": {"id": "2a4f6f01-ad28-4f8f-b835-ea86e1791f49", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1577320995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82346c440c3343a0a5c233a48203a13c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap661819ce-17", "ovs_interfaceid": "661819ce-17f6-47b5-a704-1c8c43e50373", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.810299] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa7f272-161e-4d4e-a992-0e2d5bdbcbaa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.833988] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Updating instance '917469c5-20be-4814-814f-a042415be021' progress to 0 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 984.958202] env[62522]: DEBUG nova.scheduler.client.report [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 984.968906] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Acquiring lock "895e6716-44cf-45b2-afd8-eaba71c32460" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.969157] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Lock "895e6716-44cf-45b2-afd8-eaba71c32460" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.141519] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a967fb-d791-6574-8a32-cbd9a8e9699a, 'name': SearchDatastore_Task, 'duration_secs': 0.01563} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.141891] env[62522]: DEBUG oslo_concurrency.lockutils [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.142131] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 985.142375] env[62522]: DEBUG oslo_concurrency.lockutils [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.142524] env[62522]: DEBUG oslo_concurrency.lockutils [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.142706] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 985.142991] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d6d44e2f-2a57-4f79-939b-04e98757f467 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.151670] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 
tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 985.151898] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 985.152658] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f499fbe8-f94d-4c3f-a553-d52a42433f6a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.158742] env[62522]: DEBUG oslo_concurrency.lockutils [req-783afaa1-2325-4f23-a7cf-fae61e381031 req-4345844c-38ac-4945-8dc3-be6c3cff8184 service nova] Releasing lock "refresh_cache-7f8a8270-5014-446c-aa42-ea0b4079e5a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.158999] env[62522]: DEBUG nova.compute.manager [req-783afaa1-2325-4f23-a7cf-fae61e381031 req-4345844c-38ac-4945-8dc3-be6c3cff8184 service nova] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Received event network-changed-bbfabacf-12e0-47bc-9bba-9bc066142dcd {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 985.159189] env[62522]: DEBUG nova.compute.manager [req-783afaa1-2325-4f23-a7cf-fae61e381031 req-4345844c-38ac-4945-8dc3-be6c3cff8184 service nova] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Refreshing instance network info cache due to event network-changed-bbfabacf-12e0-47bc-9bba-9bc066142dcd. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 985.159396] env[62522]: DEBUG oslo_concurrency.lockutils [req-783afaa1-2325-4f23-a7cf-fae61e381031 req-4345844c-38ac-4945-8dc3-be6c3cff8184 service nova] Acquiring lock "refresh_cache-ec2d78cf-15f9-441b-9800-8fcc513f7774" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.159543] env[62522]: DEBUG oslo_concurrency.lockutils [req-783afaa1-2325-4f23-a7cf-fae61e381031 req-4345844c-38ac-4945-8dc3-be6c3cff8184 service nova] Acquired lock "refresh_cache-ec2d78cf-15f9-441b-9800-8fcc513f7774" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.159886] env[62522]: DEBUG nova.network.neutron [req-783afaa1-2325-4f23-a7cf-fae61e381031 req-4345844c-38ac-4945-8dc3-be6c3cff8184 service nova] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Refreshing network info cache for port bbfabacf-12e0-47bc-9bba-9bc066142dcd {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 985.161018] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Waiting for the task: (returnval){ [ 985.161018] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ae4228-91dc-bf3e-8bbb-9636c7122a65" [ 985.161018] env[62522]: _type = "Task" [ 985.161018] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.170849] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ae4228-91dc-bf3e-8bbb-9636c7122a65, 'name': SearchDatastore_Task, 'duration_secs': 0.009873} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.172673] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd3d23b5-889f-41e6-aeae-1646fc253469 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.179371] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Waiting for the task: (returnval){ [ 985.179371] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d9bcaf-71ae-1ee9-9b63-f836537f2145" [ 985.179371] env[62522]: _type = "Task" [ 985.179371] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.187743] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d9bcaf-71ae-1ee9-9b63-f836537f2145, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.340471] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 985.340901] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26ad9b7a-9b42-4b48-bd34-913960497447 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.348071] env[62522]: DEBUG oslo_vmware.api [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 985.348071] env[62522]: value = "task-2415850" [ 985.348071] env[62522]: _type = "Task" [ 985.348071] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.357076] env[62522]: DEBUG oslo_vmware.api [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415850, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.470260] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.009s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.472739] env[62522]: DEBUG oslo_concurrency.lockutils [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.092s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.473137] env[62522]: DEBUG nova.objects.instance [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Lazy-loading 'resources' on Instance uuid ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 985.474535] env[62522]: DEBUG nova.compute.manager [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 985.506200] env[62522]: INFO nova.scheduler.client.report [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Deleted allocations for instance a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72 [ 985.691124] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d9bcaf-71ae-1ee9-9b63-f836537f2145, 'name': SearchDatastore_Task, 'duration_secs': 0.009254} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.691417] env[62522]: DEBUG oslo_concurrency.lockutils [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.691722] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] ec2d78cf-15f9-441b-9800-8fcc513f7774/ec2d78cf-15f9-441b-9800-8fcc513f7774.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 985.692388] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c8114948-0dc5-41ba-9500-34d398f1641c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.699162] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Waiting for the task: (returnval){ [ 985.699162] env[62522]: value = "task-2415851" [ 985.699162] env[62522]: _type = "Task" [ 985.699162] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.709465] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Task: {'id': task-2415851, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.861414] env[62522]: DEBUG oslo_vmware.api [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415850, 'name': PowerOffVM_Task, 'duration_secs': 0.256234} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.861414] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 985.861414] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Updating instance '917469c5-20be-4814-814f-a042415be021' progress to 17 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 985.954049] env[62522]: DEBUG nova.network.neutron [req-783afaa1-2325-4f23-a7cf-fae61e381031 req-4345844c-38ac-4945-8dc3-be6c3cff8184 service nova] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Updated VIF entry in instance network info cache for port bbfabacf-12e0-47bc-9bba-9bc066142dcd. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 985.954452] env[62522]: DEBUG nova.network.neutron [req-783afaa1-2325-4f23-a7cf-fae61e381031 req-4345844c-38ac-4945-8dc3-be6c3cff8184 service nova] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Updating instance_info_cache with network_info: [{"id": "bbfabacf-12e0-47bc-9bba-9bc066142dcd", "address": "fa:16:3e:d4:d1:85", "network": {"id": "0961cfa8-f183-49ef-86bc-0cf80444b236", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-255632366-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "247c239f49c8441fb723ca1b25fab349", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbfabacf-12", "ovs_interfaceid": "bbfabacf-12e0-47bc-9bba-9bc066142dcd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.007220] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.016498] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2ed5b01a-46dd-4fda-bff9-5b7bc00a6d8a tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.066s 
{{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.212485] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Task: {'id': task-2415851, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.370870] env[62522]: DEBUG nova.virt.hardware [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:04Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 986.370870] env[62522]: DEBUG nova.virt.hardware [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 986.372639] env[62522]: DEBUG nova.virt.hardware [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 986.372639] env[62522]: DEBUG nova.virt.hardware [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 986.372639] env[62522]: DEBUG nova.virt.hardware [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 986.372639] env[62522]: DEBUG nova.virt.hardware [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 986.372639] env[62522]: DEBUG nova.virt.hardware [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 986.372639] env[62522]: DEBUG nova.virt.hardware [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b 
tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 986.372639] env[62522]: DEBUG nova.virt.hardware [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 986.372639] env[62522]: DEBUG nova.virt.hardware [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 986.373151] env[62522]: DEBUG nova.virt.hardware [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 986.378715] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b683955c-16e8-4c78-b4bb-9a123f869f0d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.393457] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46d237d3-97a9-4d91-ae31-5c69e4a6da69 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.402798] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa6f0ca-b658-4aca-bb7d-29a4ed2bac22 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.407151] env[62522]: DEBUG oslo_vmware.api [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 986.407151] env[62522]: value = "task-2415852" [ 986.407151] env[62522]: _type = "Task" [ 986.407151] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.444743] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4037071-7363-44af-a022-29118fcc8881 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.448060] env[62522]: DEBUG oslo_vmware.api [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415852, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.453305] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f4886d6-2086-4d3e-bc7a-f5b6d0e63f2d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.457856] env[62522]: DEBUG oslo_concurrency.lockutils [req-783afaa1-2325-4f23-a7cf-fae61e381031 req-4345844c-38ac-4945-8dc3-be6c3cff8184 service nova] Releasing lock "refresh_cache-ec2d78cf-15f9-441b-9800-8fcc513f7774" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.469960] env[62522]: DEBUG nova.compute.provider_tree [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 986.714818] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Task: {'id': task-2415851, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.563821} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.715140] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] ec2d78cf-15f9-441b-9800-8fcc513f7774/ec2d78cf-15f9-441b-9800-8fcc513f7774.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 986.715386] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 986.715673] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-da956236-07f8-4d05-ae68-bfc39a74dcc9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.724031] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Waiting for the task: (returnval){ [ 986.724031] env[62522]: value = "task-2415853" [ 986.724031] env[62522]: _type = "Task" [ 986.724031] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.732211] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Task: {'id': task-2415853, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.920354] env[62522]: DEBUG oslo_vmware.api [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415852, 'name': ReconfigVM_Task, 'duration_secs': 0.386486} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.920354] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Updating instance '917469c5-20be-4814-814f-a042415be021' progress to 33 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 986.974976] env[62522]: DEBUG nova.scheduler.client.report [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 987.234677] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Task: {'id': task-2415853, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069822} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.234981] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 987.235828] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ffe47d-e885-44ea-b4bd-200f122bf72b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.260879] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] ec2d78cf-15f9-441b-9800-8fcc513f7774/ec2d78cf-15f9-441b-9800-8fcc513f7774.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 987.261300] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7cf2a258-15ed-4e54-bcdb-3aee8a245d55 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.281343] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Waiting for the task: (returnval){ [ 987.281343] env[62522]: value = "task-2415854" [ 987.281343] env[62522]: _type = "Task" [ 987.281343] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.290083] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Task: {'id': task-2415854, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.428143] env[62522]: DEBUG nova.virt.hardware [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 987.428481] env[62522]: DEBUG nova.virt.hardware [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 987.428772] env[62522]: DEBUG nova.virt.hardware [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 987.428952] env[62522]: DEBUG nova.virt.hardware [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 987.429146] env[62522]: DEBUG nova.virt.hardware [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 987.429346] env[62522]: DEBUG nova.virt.hardware [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 987.429695] env[62522]: DEBUG nova.virt.hardware [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 987.429831] env[62522]: DEBUG nova.virt.hardware [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 987.430056] env[62522]: DEBUG nova.virt.hardware [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 
tempest-ServerDiskConfigTestJSON-536235198-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 987.430288] env[62522]: DEBUG nova.virt.hardware [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 987.430534] env[62522]: DEBUG nova.virt.hardware [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 987.437310] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Reconfiguring VM instance instance-00000049 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 987.437782] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1d8c6cd-6988-4fbd-8953-f02fc54e1fee {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.463615] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "02708991-7f71-408e-89d8-932b845553d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.464301] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "02708991-7f71-408e-89d8-932b845553d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.470409] env[62522]: DEBUG oslo_vmware.api [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 987.470409] env[62522]: value = "task-2415855" [ 987.470409] env[62522]: _type = "Task" [ 987.470409] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.481668] env[62522]: DEBUG oslo_vmware.api [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415855, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.484958] env[62522]: DEBUG oslo_concurrency.lockutils [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.012s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.488401] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.018s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.488696] env[62522]: DEBUG nova.objects.instance [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lazy-loading 'resources' on Instance uuid 7e5fc552-748f-4569-bd61-c81a52bb46b0 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 987.513037] env[62522]: INFO nova.scheduler.client.report [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Deleted allocations for instance ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a [ 987.794945] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Task: {'id': task-2415854, 'name': ReconfigVM_Task, 'duration_secs': 0.406133} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.794945] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Reconfigured VM instance instance-0000004e to attach disk [datastore2] ec2d78cf-15f9-441b-9800-8fcc513f7774/ec2d78cf-15f9-441b-9800-8fcc513f7774.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 987.795517] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-29a7b8b5-2f33-4022-9257-ab64c26570de {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.802007] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Waiting for the task: (returnval){ [ 987.802007] env[62522]: value = "task-2415856" [ 987.802007] env[62522]: _type = "Task" [ 987.802007] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.810216] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Task: {'id': task-2415856, 'name': Rename_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.969413] env[62522]: DEBUG nova.compute.manager [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 987.987881] env[62522]: DEBUG oslo_vmware.api [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415855, 'name': ReconfigVM_Task, 'duration_secs': 0.205895} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.988218] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Reconfigured VM instance instance-00000049 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 987.989684] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6b8d0de-1ae5-47ff-a2b9-42f5da8ade49 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.019272] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] 917469c5-20be-4814-814f-a042415be021/917469c5-20be-4814-814f-a042415be021.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 988.025505] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-805e0543-4d6b-484c-9b91-0c6fc7957675 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.038973] env[62522]: DEBUG oslo_concurrency.lockutils [None req-575fd3d5-0856-46e0-9fdf-9fb8ae6c73dc tempest-ServerShowV247Test-757790888 tempest-ServerShowV247Test-757790888-project-member] Lock "ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.389s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.046696] env[62522]: DEBUG oslo_vmware.api [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 988.046696] env[62522]: value = "task-2415857" [ 988.046696] env[62522]: _type = "Task" [ 988.046696] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.062075] env[62522]: DEBUG oslo_vmware.api [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415857, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.314464] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Task: {'id': task-2415856, 'name': Rename_Task, 'duration_secs': 0.225101} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.315304] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 988.315882] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d98be8e3-8c54-40fc-aa39-fce54fe174b3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.321040] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c3dc5c-8fd7-48ef-99cb-0f637a11a67f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.331023] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c981eba-5d9e-4c88-a182-e147c091404c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.334169] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Waiting for the task: (returnval){ [ 988.334169] env[62522]: value = "task-2415858" [ 988.334169] env[62522]: _type = "Task" [ 988.334169] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.342876] env[62522]: DEBUG oslo_vmware.rw_handles [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fa52d9-958e-5536-a893-24332fcfe39d/disk-0.vmdk. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 988.370096] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a95862-af1e-4937-ae87-ae05fa14b4d1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.374084] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f104abc-e836-438a-b6f1-b0417e0e67e9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.379694] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Task: {'id': task-2415858, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.382325] env[62522]: DEBUG oslo_vmware.rw_handles [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fa52d9-958e-5536-a893-24332fcfe39d/disk-0.vmdk is in state: ready. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 988.382554] env[62522]: ERROR oslo_vmware.rw_handles [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fa52d9-958e-5536-a893-24332fcfe39d/disk-0.vmdk due to incomplete transfer. [ 988.384520] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b71359e2-3d25-4b79-a46e-5db4a6c18b1a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.386927] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb1f0beb-9e8d-4493-8d27-5d731f214184 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.402168] env[62522]: DEBUG nova.compute.provider_tree [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 988.404288] env[62522]: DEBUG oslo_vmware.rw_handles [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fa52d9-958e-5536-a893-24332fcfe39d/disk-0.vmdk. {{(pid=62522) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 988.404480] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Uploaded image 1f48c333-83a0-4c83-a8d3-3650d8a5edd4 to the Glance image server {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 988.407400] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Destroying the VM {{(pid=62522) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 988.408728] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-75ebbff2-be9b-46ab-9e6b-d997413bffc0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.414109] env[62522]: DEBUG oslo_vmware.api [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 988.414109] env[62522]: value = "task-2415859" [ 988.414109] env[62522]: _type = "Task" [ 988.414109] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.422389] env[62522]: DEBUG oslo_vmware.api [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415859, 'name': Destroy_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.506969] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.557493] env[62522]: DEBUG oslo_vmware.api [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415857, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.847125] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Task: {'id': task-2415858, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.909551] env[62522]: DEBUG nova.scheduler.client.report [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 988.923597] env[62522]: DEBUG oslo_vmware.api [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415859, 'name': Destroy_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.062563] env[62522]: DEBUG oslo_vmware.api [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415857, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.148765] env[62522]: DEBUG oslo_concurrency.lockutils [None req-de2159b8-87ad-4ef6-9e5e-abe2a1c288c1 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "3c4c395c-0625-4569-990d-e2d4ad162c14" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.148765] env[62522]: DEBUG oslo_concurrency.lockutils [None req-de2159b8-87ad-4ef6-9e5e-abe2a1c288c1 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "3c4c395c-0625-4569-990d-e2d4ad162c14" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.349050] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Task: {'id': task-2415858, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.418924] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.928s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.421379] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.933s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.421803] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.424229] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.481s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.426238] env[62522]: INFO nova.compute.claims [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 989.444255] env[62522]: DEBUG oslo_vmware.api [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 
tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415859, 'name': Destroy_Task} progress is 33%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.460992] env[62522]: INFO nova.scheduler.client.report [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Deleted allocations for instance 7e5fc552-748f-4569-bd61-c81a52bb46b0 [ 989.473386] env[62522]: INFO nova.scheduler.client.report [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Deleted allocations for instance 74e52638-d284-4bd1-8cff-c7aca9426f75 [ 989.562536] env[62522]: DEBUG oslo_vmware.api [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415857, 'name': ReconfigVM_Task, 'duration_secs': 1.443531} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.563025] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Reconfigured VM instance instance-00000049 to attach disk [datastore2] 917469c5-20be-4814-814f-a042415be021/917469c5-20be-4814-814f-a042415be021.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 989.563208] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Updating instance '917469c5-20be-4814-814f-a042415be021' progress to 50 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 989.652821] env[62522]: DEBUG nova.compute.utils [None req-de2159b8-87ad-4ef6-9e5e-abe2a1c288c1 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 989.847992] env[62522]: DEBUG oslo_vmware.api [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Task: {'id': task-2415858, 'name': PowerOnVM_Task, 'duration_secs': 1.282975} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.848277] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 989.848476] env[62522]: INFO nova.compute.manager [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Took 8.99 seconds to spawn the instance on the hypervisor. 
[ 989.848667] env[62522]: DEBUG nova.compute.manager [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 989.849468] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62aa2a12-9632-467a-a77d-3e9e053c0a94 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.931059] env[62522]: DEBUG oslo_vmware.api [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415859, 'name': Destroy_Task, 'duration_secs': 1.090615} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.931406] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Destroyed the VM [ 989.933057] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Deleting Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 989.933057] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-bb803fc3-de51-4a88-8491-d212067c048f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.940910] env[62522]: DEBUG oslo_vmware.api [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 989.940910] env[62522]: value = "task-2415860" [ 989.940910] env[62522]: _type = "Task" [ 989.940910] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.949669] env[62522]: DEBUG oslo_vmware.api [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415860, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.972606] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cbaa58e0-759a-45dd-96a6-c4ea884bee35 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "7e5fc552-748f-4569-bd61-c81a52bb46b0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.441s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.981160] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d89ddf3f-5c5b-48cd-a383-afd87d13fca8 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "74e52638-d284-4bd1-8cff-c7aca9426f75" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.934s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.064825] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "4e27a87c-4891-4e69-a6fa-312b026bf11e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.065111] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "4e27a87c-4891-4e69-a6fa-312b026bf11e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.070294] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eaab392-dd51-4eda-9442-d03137272e18 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.092048] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d68ae8f3-1ebc-4649-af02-c1b715d09a61 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.114440] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Updating instance '917469c5-20be-4814-814f-a042415be021' progress to 67 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 990.156287] env[62522]: DEBUG oslo_concurrency.lockutils [None req-de2159b8-87ad-4ef6-9e5e-abe2a1c288c1 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "3c4c395c-0625-4569-990d-e2d4ad162c14" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.370643] env[62522]: INFO nova.compute.manager [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 
tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Took 27.00 seconds to build instance. [ 990.455211] env[62522]: DEBUG oslo_vmware.api [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415860, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.568182] env[62522]: DEBUG nova.compute.manager [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 990.678195] env[62522]: DEBUG nova.network.neutron [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Port 195b1951-c091-4db1-82d8-3c20dfcaf6d1 binding to destination host cpu-1 is already ACTIVE {{(pid=62522) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 990.800682] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a8cd5b-ec85-493e-b217-56732c8c1f72 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.810089] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c073aac-c7c7-4f54-9dad-bd81cf83a022 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.844206] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-025ee4b2-f27b-4eff-a736-6443b99fc67f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.852070] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c798f0-4127-44c4-99b5-4dd94de73e02 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.866358] env[62522]: DEBUG nova.compute.provider_tree [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 990.874170] env[62522]: DEBUG oslo_concurrency.lockutils [None req-834ff955-9783-44b3-a60e-d5ce4d747b37 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Lock "ec2d78cf-15f9-441b-9800-8fcc513f7774" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.519s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.926014] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "879354d3-7423-41e2-93f6-0d8d3a120170" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.926354] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "879354d3-7423-41e2-93f6-0d8d3a120170" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.926586] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "879354d3-7423-41e2-93f6-0d8d3a120170-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.926775] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "879354d3-7423-41e2-93f6-0d8d3a120170-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.926974] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "879354d3-7423-41e2-93f6-0d8d3a120170-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.929070] env[62522]: INFO nova.compute.manager [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Terminating instance [ 990.954594] env[62522]: DEBUG oslo_vmware.api [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415860, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.088684] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.236752] env[62522]: DEBUG oslo_concurrency.lockutils [None req-de2159b8-87ad-4ef6-9e5e-abe2a1c288c1 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "3c4c395c-0625-4569-990d-e2d4ad162c14" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.236855] env[62522]: DEBUG oslo_concurrency.lockutils [None req-de2159b8-87ad-4ef6-9e5e-abe2a1c288c1 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "3c4c395c-0625-4569-990d-e2d4ad162c14" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.237092] env[62522]: INFO nova.compute.manager [None req-de2159b8-87ad-4ef6-9e5e-abe2a1c288c1 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Attaching volume 1f2482a0-4337-44cc-a788-681ae00783ea to /dev/sdb [ 991.270449] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7980b311-4f04-459e-af68-66aa3608b868 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.277812] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b0a2ddd-a543-4295-9763-7db116f9b471 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.290927] env[62522]: DEBUG nova.virt.block_device [None req-de2159b8-87ad-4ef6-9e5e-abe2a1c288c1 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Updating existing volume attachment record: 226c4135-fa80-4502-b101-8a4d6c83c91e {{(pid=62522) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 991.372046] env[62522]: DEBUG nova.scheduler.client.report [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 991.432976] env[62522]: DEBUG nova.compute.manager [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 
tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 991.433242] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 991.434211] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a50d562-59c3-406e-a57a-90dc1555bb2a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.443450] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 991.443801] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-85443572-ea4b-4104-bffd-1fb00699a12d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.459332] env[62522]: DEBUG oslo_vmware.api [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415860, 'name': RemoveSnapshot_Task, 'duration_secs': 1.454363} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.459332] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Deleted Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 991.459332] env[62522]: INFO nova.compute.manager [None req-7d8facd8-a564-4b79-8693-8c7ebc208e63 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Took 16.14 seconds to snapshot the instance on the hypervisor. [ 991.461469] env[62522]: DEBUG oslo_vmware.api [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 991.461469] env[62522]: value = "task-2415861" [ 991.461469] env[62522]: _type = "Task" [ 991.461469] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.470063] env[62522]: DEBUG oslo_vmware.api [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415861, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.542537] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Acquiring lock "ec2d78cf-15f9-441b-9800-8fcc513f7774" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.542841] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Lock "ec2d78cf-15f9-441b-9800-8fcc513f7774" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.543072] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Acquiring lock "ec2d78cf-15f9-441b-9800-8fcc513f7774-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.543317] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Lock "ec2d78cf-15f9-441b-9800-8fcc513f7774-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.543524] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Lock "ec2d78cf-15f9-441b-9800-8fcc513f7774-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.545722] env[62522]: INFO nova.compute.manager [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Terminating instance [ 991.698526] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "917469c5-20be-4814-814f-a042415be021-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.698780] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "917469c5-20be-4814-814f-a042415be021-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.698950] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "917469c5-20be-4814-814f-a042415be021-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.877421] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.453s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.877985] env[62522]: DEBUG nova.compute.manager [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 991.880733] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.875s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.882184] env[62522]: INFO nova.compute.claims [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 991.977188] env[62522]: DEBUG oslo_vmware.api [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415861, 'name': PowerOffVM_Task, 'duration_secs': 0.238684} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.977410] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 991.977583] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 991.977847] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2aa80f7a-0466-4ea2-8c12-57a64ff228d4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.039378] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 992.039685] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 992.039949] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Deleting the datastore file [datastore2] 879354d3-7423-41e2-93f6-0d8d3a120170 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 992.040299] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf25e5e0-1b04-448b-90a2-bda56ab622dc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.048172] env[62522]: DEBUG oslo_vmware.api [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for the task: (returnval){ [ 992.048172] env[62522]: value = "task-2415866" [ 992.048172] env[62522]: _type = "Task" [ 992.048172] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.048949] env[62522]: DEBUG nova.compute.manager [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 992.049211] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 992.053480] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8814123-ee20-44ff-8d53-21929e2516c8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.061106] env[62522]: DEBUG oslo_vmware.api [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415866, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.063284] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 992.063542] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-848e457e-e193-4c51-9483-f7c414e6682c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.068965] env[62522]: DEBUG oslo_vmware.api [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Waiting for the task: (returnval){ [ 992.068965] env[62522]: value = "task-2415867" [ 992.068965] env[62522]: _type = "Task" [ 992.068965] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.076839] env[62522]: DEBUG oslo_vmware.api [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Task: {'id': task-2415867, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.387042] env[62522]: DEBUG nova.compute.utils [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 992.390096] env[62522]: DEBUG nova.compute.manager [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 992.390269] env[62522]: DEBUG nova.network.neutron [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 992.432796] env[62522]: DEBUG nova.policy [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '55ada09ff7054189ba8820a7b1963fd5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e1ca5dfb8f2d4b2e932679e017fe8b3e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 992.559413] env[62522]: DEBUG oslo_vmware.api [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Task: {'id': task-2415866, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174187} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.559679] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 992.559898] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 992.560523] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 992.560882] env[62522]: INFO nova.compute.manager [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Took 1.13 seconds to destroy the instance on the hypervisor. [ 992.561177] env[62522]: DEBUG oslo.service.loopingcall [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 992.561381] env[62522]: DEBUG nova.compute.manager [-] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 992.561597] env[62522]: DEBUG nova.network.neutron [-] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 992.578869] env[62522]: DEBUG oslo_vmware.api [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Task: {'id': task-2415867, 'name': PowerOffVM_Task, 'duration_secs': 0.159036} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.579140] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 992.579314] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 992.579563] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fe29c0b2-5275-4567-93a5-6bedc816110b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.676906] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 992.676906] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 992.676906] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Deleting the datastore file [datastore2] ec2d78cf-15f9-441b-9800-8fcc513f7774 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 992.676906] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df0a1578-b7f4-435b-beca-9944810c5af8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.683360] env[62522]: DEBUG oslo_vmware.api [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 
tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Waiting for the task: (returnval){ [ 992.683360] env[62522]: value = "task-2415869" [ 992.683360] env[62522]: _type = "Task" [ 992.683360] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.691368] env[62522]: DEBUG oslo_vmware.api [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Task: {'id': task-2415869, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.722745] env[62522]: DEBUG nova.network.neutron [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Successfully created port: 47809969-d413-4587-acbe-3071b4ded420 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 992.770194] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "refresh_cache-917469c5-20be-4814-814f-a042415be021" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 992.770316] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired lock "refresh_cache-917469c5-20be-4814-814f-a042415be021" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.770487] env[62522]: DEBUG nova.network.neutron [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 992.894198] env[62522]: DEBUG nova.compute.manager [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 992.921588] env[62522]: DEBUG nova.compute.manager [req-39bfe5e0-222c-4864-82e5-b04d45999ea8 req-a2c929e6-dc42-44ee-b258-0dde7898226f service nova] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Received event network-vif-deleted-cd619060-5655-434c-967f-7552adca021b {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 992.921847] env[62522]: INFO nova.compute.manager [req-39bfe5e0-222c-4864-82e5-b04d45999ea8 req-a2c929e6-dc42-44ee-b258-0dde7898226f service nova] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Neutron deleted interface cd619060-5655-434c-967f-7552adca021b; detaching it from the instance and deleting it from the info cache [ 992.922076] env[62522]: DEBUG nova.network.neutron [req-39bfe5e0-222c-4864-82e5-b04d45999ea8 req-a2c929e6-dc42-44ee-b258-0dde7898226f service nova] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.193152] env[62522]: DEBUG oslo_vmware.api [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Task: {'id': task-2415869, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.46125} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.195582] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 993.195777] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 993.195959] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 993.196146] env[62522]: INFO nova.compute.manager [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Took 1.15 seconds to destroy the instance on the hypervisor. [ 993.196449] env[62522]: DEBUG oslo.service.loopingcall [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 993.196907] env[62522]: DEBUG nova.compute.manager [-] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 993.197024] env[62522]: DEBUG nova.network.neutron [-] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 993.216620] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b070bbe-44c7-425e-96fb-ed07142c6293 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.224030] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860e0659-cc6a-48f3-89e0-79263173a72e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.253323] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af0dde3e-3acc-4439-a600-b0f09a443fac {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.262143] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bab61c5-d582-4a31-b07b-e10eac8f0d5a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.276379] env[62522]: DEBUG nova.compute.provider_tree [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 993.402480] env[62522]: DEBUG nova.network.neutron [-] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.425951] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6d2aed1f-85dd-4e4f-b4de-c56c9a3fd43f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.437219] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a6fd7a5-82ad-4b75-a452-9da957888727 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.474339] env[62522]: DEBUG nova.compute.manager [req-39bfe5e0-222c-4864-82e5-b04d45999ea8 req-a2c929e6-dc42-44ee-b258-0dde7898226f service nova] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Detach interface failed, port_id=cd619060-5655-434c-967f-7552adca021b, reason: Instance 879354d3-7423-41e2-93f6-0d8d3a120170 could not be found. 
{{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 993.516695] env[62522]: DEBUG nova.network.neutron [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Updating instance_info_cache with network_info: [{"id": "195b1951-c091-4db1-82d8-3c20dfcaf6d1", "address": "fa:16:3e:9c:63:e3", "network": {"id": "70e81afa-0eda-49c5-b072-e79b3c287468", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1715169983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff5da278d2be4ca983424c8291beadec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap195b1951-c0", "ovs_interfaceid": "195b1951-c091-4db1-82d8-3c20dfcaf6d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.799329] env[62522]: ERROR nova.scheduler.client.report [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [req-545a4647-95c2-421b-9cdf-0adbddf0b88a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c7fa38b2-245d-4337-a012-22c1a01c0a72. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-545a4647-95c2-421b-9cdf-0adbddf0b88a"}]} [ 993.815574] env[62522]: DEBUG nova.scheduler.client.report [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Refreshing inventories for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 993.828737] env[62522]: DEBUG nova.scheduler.client.report [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Updating ProviderTree inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 993.832610] env[62522]: DEBUG nova.compute.provider_tree [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 993.838790] env[62522]: DEBUG nova.scheduler.client.report [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Refreshing aggregate associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, aggregates: None {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 993.855706] env[62522]: DEBUG nova.scheduler.client.report [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Refreshing trait associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 993.913566] env[62522]: INFO nova.compute.manager [-] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Took 1.35 seconds to deallocate network for instance. [ 993.914672] env[62522]: DEBUG nova.compute.manager [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 993.947042] env[62522]: DEBUG nova.virt.hardware [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 993.947326] env[62522]: DEBUG nova.virt.hardware [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 993.947491] env[62522]: DEBUG nova.virt.hardware [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 993.947689] env[62522]: DEBUG nova.virt.hardware [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 993.947851] env[62522]: DEBUG nova.virt.hardware [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 993.948020] env[62522]: DEBUG nova.virt.hardware [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 993.948303] env[62522]: DEBUG nova.virt.hardware [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 993.948577] env[62522]: DEBUG nova.virt.hardware [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 993.948657] env[62522]: DEBUG nova.virt.hardware [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e 
tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 993.948833] env[62522]: DEBUG nova.virt.hardware [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 993.949043] env[62522]: DEBUG nova.virt.hardware [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 993.949976] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c7de1d4-4cc9-4f3d-be24-b731aabadd9c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.960945] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-457e48cd-7cbf-4ba5-9289-44235ac95fb7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.018789] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Releasing lock "refresh_cache-917469c5-20be-4814-814f-a042415be021" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.042305] env[62522]: DEBUG nova.network.neutron [-] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.154326] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4046b7b-4116-4506-8b28-d15a50f8762d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.162552] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8916d5f6-a08b-41dd-9343-ddd388c22745 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.201062] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e323bd86-5a8b-4037-b7cc-a8e5c2a639e2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.207674] env[62522]: DEBUG nova.compute.manager [req-71d9ee88-00e1-4a69-822a-aaa240221d04 req-e65194dc-ee2a-4f6d-9903-8b782d4c59d2 service nova] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Received event network-vif-plugged-47809969-d413-4587-acbe-3071b4ded420 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 994.207895] env[62522]: DEBUG oslo_concurrency.lockutils [req-71d9ee88-00e1-4a69-822a-aaa240221d04 req-e65194dc-ee2a-4f6d-9903-8b782d4c59d2 service nova] Acquiring lock "548364e9-b19a-4777-8e62-19b8a0594f36-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" 
{{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.208070] env[62522]: DEBUG oslo_concurrency.lockutils [req-71d9ee88-00e1-4a69-822a-aaa240221d04 req-e65194dc-ee2a-4f6d-9903-8b782d4c59d2 service nova] Lock "548364e9-b19a-4777-8e62-19b8a0594f36-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.208231] env[62522]: DEBUG oslo_concurrency.lockutils [req-71d9ee88-00e1-4a69-822a-aaa240221d04 req-e65194dc-ee2a-4f6d-9903-8b782d4c59d2 service nova] Lock "548364e9-b19a-4777-8e62-19b8a0594f36-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.208394] env[62522]: DEBUG nova.compute.manager [req-71d9ee88-00e1-4a69-822a-aaa240221d04 req-e65194dc-ee2a-4f6d-9903-8b782d4c59d2 service nova] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] No waiting events found dispatching network-vif-plugged-47809969-d413-4587-acbe-3071b4ded420 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 994.208553] env[62522]: WARNING nova.compute.manager [req-71d9ee88-00e1-4a69-822a-aaa240221d04 req-e65194dc-ee2a-4f6d-9903-8b782d4c59d2 service nova] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Received unexpected event network-vif-plugged-47809969-d413-4587-acbe-3071b4ded420 for instance with vm_state building and task_state spawning. [ 994.213198] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a85ee3d3-ba60-4d64-b82d-ffab0ae4a37e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.228320] env[62522]: DEBUG nova.compute.provider_tree [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 994.302516] env[62522]: DEBUG nova.network.neutron [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Successfully updated port: 47809969-d413-4587-acbe-3071b4ded420 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 994.424229] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.545708] env[62522]: INFO nova.compute.manager [-] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Took 1.35 
seconds to deallocate network for instance. [ 994.548739] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e989a89-1a87-4004-958c-79a3025ff273 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.570933] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a75bf042-669f-45af-a717-f7801f48f476 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.578250] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Updating instance '917469c5-20be-4814-814f-a042415be021' progress to 83 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 994.758908] env[62522]: DEBUG nova.scheduler.client.report [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Updated inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with generation 110 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 994.759192] env[62522]: DEBUG nova.compute.provider_tree [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Updating resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 generation from 110 to 111 during operation: update_inventory {{(pid=62522) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 994.759374] env[62522]: DEBUG nova.compute.provider_tree [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 994.807513] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquiring lock "refresh_cache-548364e9-b19a-4777-8e62-19b8a0594f36" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.807667] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquired lock 
"refresh_cache-548364e9-b19a-4777-8e62-19b8a0594f36" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.807876] env[62522]: DEBUG nova.network.neutron [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 994.950425] env[62522]: DEBUG nova.compute.manager [req-930e66b8-0200-44ec-9bb5-65f1b6fce06b req-a6d35a3e-73c0-4f33-8387-7b2ce9de5ed8 service nova] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Received event network-vif-deleted-bbfabacf-12e0-47bc-9bba-9bc066142dcd {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 995.056043] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.087176] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 995.087479] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f2e7bcc9-f8fc-486f-ab67-3e729a93d3a0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.094762] env[62522]: DEBUG oslo_vmware.api [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 995.094762] env[62522]: value = "task-2415871" [ 995.094762] env[62522]: _type = "Task" [ 995.094762] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.102315] env[62522]: DEBUG oslo_vmware.api [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415871, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.264716] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.384s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 995.265342] env[62522]: DEBUG nova.compute.manager [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 995.268014] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.761s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.269447] env[62522]: INFO nova.compute.claims [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 995.343126] env[62522]: DEBUG nova.network.neutron [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 995.477584] env[62522]: DEBUG nova.network.neutron [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Updating instance_info_cache with network_info: [{"id": "47809969-d413-4587-acbe-3071b4ded420", "address": "fa:16:3e:d8:1d:76", "network": {"id": "4b04f6e1-0714-469b-9941-be6f5b6128d0", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-292476225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e1ca5dfb8f2d4b2e932679e017fe8b3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47809969-d4", "ovs_interfaceid": "47809969-d413-4587-acbe-3071b4ded420", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.607466] env[62522]: DEBUG oslo_vmware.api [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415871, 'name': PowerOnVM_Task, 'duration_secs': 0.381207} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.608361] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 995.608742] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5342a4a1-a2c9-4ce2-9cf5-dff9fa0c9d9b tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Updating instance '917469c5-20be-4814-814f-a042415be021' progress to 100 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 995.774172] env[62522]: DEBUG nova.compute.utils [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 995.777910] env[62522]: DEBUG nova.compute.manager [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 995.778099] env[62522]: DEBUG nova.network.neutron [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 995.815534] env[62522]: DEBUG nova.policy [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '863553a271044c3a9cd70259a836c44f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '668fdbcae4a643a6ab24d1e616a637cc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 995.991131] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Releasing lock "refresh_cache-548364e9-b19a-4777-8e62-19b8a0594f36" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 995.991131] env[62522]: DEBUG nova.compute.manager [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Instance network_info: |[{"id": "47809969-d413-4587-acbe-3071b4ded420", "address": "fa:16:3e:d8:1d:76", "network": {"id": "4b04f6e1-0714-469b-9941-be6f5b6128d0", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-292476225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": 
{"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e1ca5dfb8f2d4b2e932679e017fe8b3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47809969-d4", "ovs_interfaceid": "47809969-d413-4587-acbe-3071b4ded420", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 995.991131] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:1d:76', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8459aaf-d6a8-46fb-ad14-464ac3104695', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47809969-d413-4587-acbe-3071b4ded420', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 996.004123] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Creating folder: Project (e1ca5dfb8f2d4b2e932679e017fe8b3e). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 996.004472] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-69cc4452-6bd5-4bb1-920e-7ae85b5a6006 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.014832] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Created folder: Project (e1ca5dfb8f2d4b2e932679e017fe8b3e) in parent group-v489562. [ 996.014832] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Creating folder: Instances. Parent ref: group-v489777. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 996.017104] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e6c107a-57de-405d-a0d9-ddf286421215 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.025343] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Created folder: Instances in parent group-v489777. 
[ 996.025551] env[62522]: DEBUG oslo.service.loopingcall [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 996.026284] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 996.026284] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e214038c-184a-4449-8289-a9e157d41e60 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.053640] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 996.053640] env[62522]: value = "task-2415874" [ 996.053640] env[62522]: _type = "Task" [ 996.053640] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.062440] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415874, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.131090] env[62522]: DEBUG nova.network.neutron [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Successfully created port: ca82312a-dff9-4d56-af90-21b3984f4146 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 996.237516] env[62522]: DEBUG nova.compute.manager [req-4e763145-9217-4b15-abaa-494da2061e8c req-4e40fc89-b193-40d0-a6c5-9bde26a04377 service nova] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Received event network-changed-47809969-d413-4587-acbe-3071b4ded420 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 996.237713] env[62522]: DEBUG nova.compute.manager [req-4e763145-9217-4b15-abaa-494da2061e8c req-4e40fc89-b193-40d0-a6c5-9bde26a04377 service nova] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Refreshing instance network info cache due to event network-changed-47809969-d413-4587-acbe-3071b4ded420. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 996.237934] env[62522]: DEBUG oslo_concurrency.lockutils [req-4e763145-9217-4b15-abaa-494da2061e8c req-4e40fc89-b193-40d0-a6c5-9bde26a04377 service nova] Acquiring lock "refresh_cache-548364e9-b19a-4777-8e62-19b8a0594f36" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.238154] env[62522]: DEBUG oslo_concurrency.lockutils [req-4e763145-9217-4b15-abaa-494da2061e8c req-4e40fc89-b193-40d0-a6c5-9bde26a04377 service nova] Acquired lock "refresh_cache-548364e9-b19a-4777-8e62-19b8a0594f36" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.238334] env[62522]: DEBUG nova.network.neutron [req-4e763145-9217-4b15-abaa-494da2061e8c req-4e40fc89-b193-40d0-a6c5-9bde26a04377 service nova] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Refreshing network info cache for port 47809969-d413-4587-acbe-3071b4ded420 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 996.282026] env[62522]: DEBUG nova.compute.manager [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 996.337471] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-de2159b8-87ad-4ef6-9e5e-abe2a1c288c1 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Volume attach. Driver type: vmdk {{(pid=62522) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 996.338054] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-de2159b8-87ad-4ef6-9e5e-abe2a1c288c1 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489776', 'volume_id': '1f2482a0-4337-44cc-a788-681ae00783ea', 'name': 'volume-1f2482a0-4337-44cc-a788-681ae00783ea', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3c4c395c-0625-4569-990d-e2d4ad162c14', 'attached_at': '', 'detached_at': '', 'volume_id': '1f2482a0-4337-44cc-a788-681ae00783ea', 'serial': '1f2482a0-4337-44cc-a788-681ae00783ea'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 996.339125] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b541eebb-d1c4-468a-8db4-ffbcf86ff976 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.365418] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4330071a-f397-4c39-9e1d-38fdfc31830e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.390765] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-de2159b8-87ad-4ef6-9e5e-abe2a1c288c1 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Reconfiguring VM instance 
instance-0000002d to attach disk [datastore1] volume-1f2482a0-4337-44cc-a788-681ae00783ea/volume-1f2482a0-4337-44cc-a788-681ae00783ea.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 996.393698] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2015bcc6-12e3-43ec-b4bc-aa87da3efbf5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.412708] env[62522]: DEBUG oslo_vmware.api [None req-de2159b8-87ad-4ef6-9e5e-abe2a1c288c1 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 996.412708] env[62522]: value = "task-2415875" [ 996.412708] env[62522]: _type = "Task" [ 996.412708] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.421606] env[62522]: DEBUG oslo_vmware.api [None req-de2159b8-87ad-4ef6-9e5e-abe2a1c288c1 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2415875, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.560437] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415874, 'name': CreateVM_Task, 'duration_secs': 0.341665} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.563071] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 996.563940] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.564251] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.564774] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 996.564774] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25b5a611-1e5d-4bb4-92d4-a1a745bf18ce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.569804] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 996.569804] env[62522]: value = 
"session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ede2d4-9d63-9d1e-2091-3f17004b4972" [ 996.569804] env[62522]: _type = "Task" [ 996.569804] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.578376] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ede2d4-9d63-9d1e-2091-3f17004b4972, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.648509] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf4c3dec-b295-4f4b-94c2-2135460e53dd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.655881] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49d34c66-e3a6-4fcf-9157-d2e3b086713b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.688086] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b34c052-72e9-4bb3-bc9d-82106cf44e52 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.696271] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5475ce7-2311-45b3-8e45-50b257c9d904 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.710693] env[62522]: DEBUG nova.compute.provider_tree [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 996.922480] env[62522]: DEBUG oslo_vmware.api [None req-de2159b8-87ad-4ef6-9e5e-abe2a1c288c1 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2415875, 'name': ReconfigVM_Task, 'duration_secs': 0.3848} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.922752] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-de2159b8-87ad-4ef6-9e5e-abe2a1c288c1 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Reconfigured VM instance instance-0000002d to attach disk [datastore1] volume-1f2482a0-4337-44cc-a788-681ae00783ea/volume-1f2482a0-4337-44cc-a788-681ae00783ea.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 996.927407] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d059193-2faa-426e-b05d-ba8bb31b02e1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.948529] env[62522]: DEBUG oslo_vmware.api [None req-de2159b8-87ad-4ef6-9e5e-abe2a1c288c1 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 996.948529] env[62522]: value = "task-2415876" [ 996.948529] env[62522]: _type = "Task" [ 996.948529] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.957429] env[62522]: DEBUG oslo_vmware.api [None req-de2159b8-87ad-4ef6-9e5e-abe2a1c288c1 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2415876, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.079625] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ede2d4-9d63-9d1e-2091-3f17004b4972, 'name': SearchDatastore_Task, 'duration_secs': 0.009198} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.079934] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.080187] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 997.080414] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.080553] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.080755] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 997.081033] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ec687973-3293-44a1-ac35-1f3d67d10386 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.088685] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 997.088855] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 997.089579] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e9ff1a2-ac35-4ca7-9e30-39cac508037d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.094952] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 997.094952] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526ff09f-8094-e663-d2b7-9cd4c2ec0f24" [ 997.094952] env[62522]: _type = "Task" [ 997.094952] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.097951] env[62522]: DEBUG nova.network.neutron [req-4e763145-9217-4b15-abaa-494da2061e8c req-4e40fc89-b193-40d0-a6c5-9bde26a04377 service nova] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Updated VIF entry in instance network info cache for port 47809969-d413-4587-acbe-3071b4ded420. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 997.098289] env[62522]: DEBUG nova.network.neutron [req-4e763145-9217-4b15-abaa-494da2061e8c req-4e40fc89-b193-40d0-a6c5-9bde26a04377 service nova] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Updating instance_info_cache with network_info: [{"id": "47809969-d413-4587-acbe-3071b4ded420", "address": "fa:16:3e:d8:1d:76", "network": {"id": "4b04f6e1-0714-469b-9941-be6f5b6128d0", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-292476225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e1ca5dfb8f2d4b2e932679e017fe8b3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47809969-d4", "ovs_interfaceid": "47809969-d413-4587-acbe-3071b4ded420", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.103635] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526ff09f-8094-e663-d2b7-9cd4c2ec0f24, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.214057] env[62522]: DEBUG nova.scheduler.client.report [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 997.292655] env[62522]: DEBUG nova.compute.manager [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 997.317804] env[62522]: DEBUG nova.virt.hardware [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 997.318082] env[62522]: DEBUG nova.virt.hardware [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 997.318246] env[62522]: DEBUG nova.virt.hardware [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 997.318429] env[62522]: DEBUG nova.virt.hardware [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 997.318629] env[62522]: DEBUG nova.virt.hardware [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 997.318756] env[62522]: DEBUG nova.virt.hardware [None 
req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 997.318973] env[62522]: DEBUG nova.virt.hardware [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 997.319231] env[62522]: DEBUG nova.virt.hardware [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 997.319535] env[62522]: DEBUG nova.virt.hardware [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 997.319623] env[62522]: DEBUG nova.virt.hardware [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 997.319799] env[62522]: DEBUG nova.virt.hardware [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 997.322477] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf5ce67-36dd-4749-877e-bb02f789f087 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.330666] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39512cb2-8819-45fb-8f86-9e0d8aa29e7e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.459049] env[62522]: DEBUG oslo_vmware.api [None req-de2159b8-87ad-4ef6-9e5e-abe2a1c288c1 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2415876, 'name': ReconfigVM_Task, 'duration_secs': 0.139899} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.459393] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-de2159b8-87ad-4ef6-9e5e-abe2a1c288c1 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489776', 'volume_id': '1f2482a0-4337-44cc-a788-681ae00783ea', 'name': 'volume-1f2482a0-4337-44cc-a788-681ae00783ea', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3c4c395c-0625-4569-990d-e2d4ad162c14', 'attached_at': '', 'detached_at': '', 'volume_id': '1f2482a0-4337-44cc-a788-681ae00783ea', 'serial': '1f2482a0-4337-44cc-a788-681ae00783ea'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 997.484830] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e9d397a8-8bb8-4a3d-abdb-1fa2e4be5766 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "917469c5-20be-4814-814f-a042415be021" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.484830] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e9d397a8-8bb8-4a3d-abdb-1fa2e4be5766 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "917469c5-20be-4814-814f-a042415be021" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.485023] env[62522]: DEBUG nova.compute.manager [None req-e9d397a8-8bb8-4a3d-abdb-1fa2e4be5766 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Going to confirm migration 3 {{(pid=62522) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 997.600439] env[62522]: DEBUG oslo_concurrency.lockutils [req-4e763145-9217-4b15-abaa-494da2061e8c req-4e40fc89-b193-40d0-a6c5-9bde26a04377 service nova] Releasing lock "refresh_cache-548364e9-b19a-4777-8e62-19b8a0594f36" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.605945] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526ff09f-8094-e663-d2b7-9cd4c2ec0f24, 'name': SearchDatastore_Task, 'duration_secs': 0.008518} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.606650] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee703e44-e211-48ba-84d3-9ceab9282b13 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.612119] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 997.612119] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f05898-d29f-1c2b-f589-0cf950463efe" [ 997.612119] env[62522]: _type = "Task" [ 997.612119] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.620420] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f05898-d29f-1c2b-f589-0cf950463efe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.648255] env[62522]: DEBUG nova.network.neutron [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Successfully updated port: ca82312a-dff9-4d56-af90-21b3984f4146 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 997.720036] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.452s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.720247] env[62522]: DEBUG nova.compute.manager [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 997.722895] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.634s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.724352] env[62522]: INFO nova.compute.claims [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 998.070472] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e9d397a8-8bb8-4a3d-abdb-1fa2e4be5766 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "refresh_cache-917469c5-20be-4814-814f-a042415be021" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.070472] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e9d397a8-8bb8-4a3d-abdb-1fa2e4be5766 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired lock "refresh_cache-917469c5-20be-4814-814f-a042415be021" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.070472] env[62522]: DEBUG nova.network.neutron [None req-e9d397a8-8bb8-4a3d-abdb-1fa2e4be5766 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 998.070472] env[62522]: DEBUG nova.objects.instance [None req-e9d397a8-8bb8-4a3d-abdb-1fa2e4be5766 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lazy-loading 'info_cache' on Instance uuid 917469c5-20be-4814-814f-a042415be021 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 998.123491] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f05898-d29f-1c2b-f589-0cf950463efe, 'name': SearchDatastore_Task, 'duration_secs': 0.010574} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.123933] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.124330] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 548364e9-b19a-4777-8e62-19b8a0594f36/548364e9-b19a-4777-8e62-19b8a0594f36.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 998.124850] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dbd5a41c-c5b5-45e4-8cf3-5dbe364882e2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.131778] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 998.131778] env[62522]: value = "task-2415877" [ 998.131778] env[62522]: _type = "Task" [ 998.131778] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.140094] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415877, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.151159] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Acquiring lock "refresh_cache-895e6716-44cf-45b2-afd8-eaba71c32460" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.151269] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Acquired lock "refresh_cache-895e6716-44cf-45b2-afd8-eaba71c32460" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.151451] env[62522]: DEBUG nova.network.neutron [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 998.228550] env[62522]: DEBUG nova.compute.utils [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 998.234465] env[62522]: DEBUG nova.compute.manager [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 998.234465] env[62522]: DEBUG nova.network.neutron [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 998.276766] env[62522]: DEBUG nova.policy [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9694ee575d094ccf845eb57acf3e70c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '00b27498c07344d1bf9cecefa0fca033', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 998.378694] env[62522]: DEBUG nova.compute.manager [req-20bfbd5a-8a61-49ee-85e5-4d818b307d6a req-1960735b-7224-4a24-a477-be68a71970fa service nova] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Received event network-vif-plugged-ca82312a-dff9-4d56-af90-21b3984f4146 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 998.379083] env[62522]: DEBUG oslo_concurrency.lockutils [req-20bfbd5a-8a61-49ee-85e5-4d818b307d6a req-1960735b-7224-4a24-a477-be68a71970fa service nova] Acquiring lock "895e6716-44cf-45b2-afd8-eaba71c32460-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.379379] env[62522]: DEBUG oslo_concurrency.lockutils [req-20bfbd5a-8a61-49ee-85e5-4d818b307d6a req-1960735b-7224-4a24-a477-be68a71970fa service nova] Lock "895e6716-44cf-45b2-afd8-eaba71c32460-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.379543] env[62522]: DEBUG oslo_concurrency.lockutils [req-20bfbd5a-8a61-49ee-85e5-4d818b307d6a req-1960735b-7224-4a24-a477-be68a71970fa service nova] Lock "895e6716-44cf-45b2-afd8-eaba71c32460-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.379750] env[62522]: DEBUG nova.compute.manager [req-20bfbd5a-8a61-49ee-85e5-4d818b307d6a req-1960735b-7224-4a24-a477-be68a71970fa service nova] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] No waiting events found dispatching network-vif-plugged-ca82312a-dff9-4d56-af90-21b3984f4146 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 998.379931] env[62522]: WARNING nova.compute.manager [req-20bfbd5a-8a61-49ee-85e5-4d818b307d6a req-1960735b-7224-4a24-a477-be68a71970fa service nova] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Received unexpected event network-vif-plugged-ca82312a-dff9-4d56-af90-21b3984f4146 for instance with vm_state building and task_state spawning. [ 998.380887] env[62522]: DEBUG nova.compute.manager [req-20bfbd5a-8a61-49ee-85e5-4d818b307d6a req-1960735b-7224-4a24-a477-be68a71970fa service nova] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Received event network-changed-ca82312a-dff9-4d56-af90-21b3984f4146 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 998.381125] env[62522]: DEBUG nova.compute.manager [req-20bfbd5a-8a61-49ee-85e5-4d818b307d6a req-1960735b-7224-4a24-a477-be68a71970fa service nova] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Refreshing instance network info cache due to event network-changed-ca82312a-dff9-4d56-af90-21b3984f4146. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 998.381317] env[62522]: DEBUG oslo_concurrency.lockutils [req-20bfbd5a-8a61-49ee-85e5-4d818b307d6a req-1960735b-7224-4a24-a477-be68a71970fa service nova] Acquiring lock "refresh_cache-895e6716-44cf-45b2-afd8-eaba71c32460" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.507400] env[62522]: DEBUG nova.objects.instance [None req-de2159b8-87ad-4ef6-9e5e-abe2a1c288c1 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lazy-loading 'flavor' on Instance uuid 3c4c395c-0625-4569-990d-e2d4ad162c14 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 998.605374] env[62522]: DEBUG nova.network.neutron [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Successfully created port: cb27129b-6e1b-4340-8b38-2b33a9c6c83c {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 998.642752] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415877, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497657} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.642752] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 548364e9-b19a-4777-8e62-19b8a0594f36/548364e9-b19a-4777-8e62-19b8a0594f36.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 998.642752] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 998.642752] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0d82fcf9-0b8b-4122-a477-a418721ad2d8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.648245] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 998.648245] env[62522]: value = "task-2415878" [ 998.648245] env[62522]: _type = "Task" [ 998.648245] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.658363] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415878, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.662730] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c59845e9-5e10-4636-9490-65abd9c48614 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "3c4c395c-0625-4569-990d-e2d4ad162c14" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.700112] env[62522]: DEBUG nova.network.neutron [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 998.739769] env[62522]: DEBUG nova.compute.manager [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 998.914693] env[62522]: DEBUG nova.network.neutron [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Updating instance_info_cache with network_info: [{"id": "ca82312a-dff9-4d56-af90-21b3984f4146", "address": "fa:16:3e:da:7e:3a", "network": {"id": "23abb62b-e824-40be-9189-f52dff8f7152", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-184467061-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "668fdbcae4a643a6ab24d1e616a637cc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ce17e10e-2fb0-4191-afee-e2b89fa15074", "external-id": "nsx-vlan-transportzone-352", "segmentation_id": 352, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca82312a-df", "ovs_interfaceid": "ca82312a-dff9-4d56-af90-21b3984f4146", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.015103] env[62522]: DEBUG oslo_concurrency.lockutils [None req-de2159b8-87ad-4ef6-9e5e-abe2a1c288c1 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "3c4c395c-0625-4569-990d-e2d4ad162c14" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.778s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.016252] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c59845e9-5e10-4636-9490-65abd9c48614 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "3c4c395c-0625-4569-990d-e2d4ad162c14" acquired by 
"nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.353s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.016347] env[62522]: DEBUG nova.compute.manager [None req-c59845e9-5e10-4636-9490-65abd9c48614 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 999.017441] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1de8b48-2ea1-49b8-91aa-4fc1bc752f5a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.026683] env[62522]: DEBUG nova.compute.manager [None req-c59845e9-5e10-4636-9490-65abd9c48614 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62522) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 999.026683] env[62522]: DEBUG nova.objects.instance [None req-c59845e9-5e10-4636-9490-65abd9c48614 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lazy-loading 'flavor' on Instance uuid 3c4c395c-0625-4569-990d-e2d4ad162c14 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 999.084421] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3cc6f86-71fc-461e-97eb-aa6ef8a0561a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.092514] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef2075e4-40e2-4394-8c50-de4f24b03dbf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.124096] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c66ab9-6b7a-4185-b1eb-ceee49cb0700 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.131594] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5666586-9520-4f91-8066-82ecf99befa3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.146765] env[62522]: DEBUG nova.compute.provider_tree [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 999.156188] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415878, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066242} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.156450] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 999.157278] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9cf9fe8-bc2b-49a0-9ad6-86cd1d980126 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.180463] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] 548364e9-b19a-4777-8e62-19b8a0594f36/548364e9-b19a-4777-8e62-19b8a0594f36.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 999.181017] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9125bc6-b582-440b-aed3-88187a41661f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.200718] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 999.200718] env[62522]: value = "task-2415879" [ 999.200718] env[62522]: _type = "Task" [ 999.200718] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.208155] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415879, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.418361] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Releasing lock "refresh_cache-895e6716-44cf-45b2-afd8-eaba71c32460" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.418696] env[62522]: DEBUG nova.compute.manager [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Instance network_info: |[{"id": "ca82312a-dff9-4d56-af90-21b3984f4146", "address": "fa:16:3e:da:7e:3a", "network": {"id": "23abb62b-e824-40be-9189-f52dff8f7152", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-184467061-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "668fdbcae4a643a6ab24d1e616a637cc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ce17e10e-2fb0-4191-afee-e2b89fa15074", "external-id": "nsx-vlan-transportzone-352", "segmentation_id": 352, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca82312a-df", "ovs_interfaceid": "ca82312a-dff9-4d56-af90-21b3984f4146", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 999.418994] env[62522]: DEBUG oslo_concurrency.lockutils [req-20bfbd5a-8a61-49ee-85e5-4d818b307d6a req-1960735b-7224-4a24-a477-be68a71970fa service nova] Acquired lock "refresh_cache-895e6716-44cf-45b2-afd8-eaba71c32460" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.419210] env[62522]: DEBUG nova.network.neutron [req-20bfbd5a-8a61-49ee-85e5-4d818b307d6a req-1960735b-7224-4a24-a477-be68a71970fa service nova] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Refreshing network info cache for port ca82312a-dff9-4d56-af90-21b3984f4146 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 999.420314] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:7e:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ce17e10e-2fb0-4191-afee-e2b89fa15074', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ca82312a-dff9-4d56-af90-21b3984f4146', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 999.427913] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Creating folder: 
Project (668fdbcae4a643a6ab24d1e616a637cc). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 999.429090] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b994466b-7873-4f3b-bdfa-2927d4235fe7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.442571] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Created folder: Project (668fdbcae4a643a6ab24d1e616a637cc) in parent group-v489562. [ 999.442875] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Creating folder: Instances. Parent ref: group-v489780. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 999.443206] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b282233-28ef-4baa-a17b-0f2edeb8880e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.453678] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Created folder: Instances in parent group-v489780. [ 999.455444] env[62522]: DEBUG oslo.service.loopingcall [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 999.455444] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 999.455444] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5d5a82b3-1c2d-454a-8522-4964f9b6ba26 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.486916] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 999.486916] env[62522]: value = "task-2415882" [ 999.486916] env[62522]: _type = "Task" [ 999.486916] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.497308] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415882, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.650407] env[62522]: DEBUG nova.scheduler.client.report [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 999.708142] env[62522]: DEBUG nova.network.neutron [None req-e9d397a8-8bb8-4a3d-abdb-1fa2e4be5766 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Updating instance_info_cache with network_info: [{"id": "195b1951-c091-4db1-82d8-3c20dfcaf6d1", "address": "fa:16:3e:9c:63:e3", "network": {"id": "70e81afa-0eda-49c5-b072-e79b3c287468", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1715169983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff5da278d2be4ca983424c8291beadec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap195b1951-c0", "ovs_interfaceid": "195b1951-c091-4db1-82d8-3c20dfcaf6d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.713627] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415879, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.750485] env[62522]: DEBUG nova.compute.manager [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 999.777354] env[62522]: DEBUG nova.virt.hardware [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 999.777647] env[62522]: DEBUG nova.virt.hardware [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 999.777826] env[62522]: DEBUG nova.virt.hardware [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 999.778186] env[62522]: DEBUG nova.virt.hardware [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 999.778421] env[62522]: DEBUG nova.virt.hardware [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 999.778596] env[62522]: DEBUG nova.virt.hardware [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 999.778817] env[62522]: DEBUG nova.virt.hardware [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 999.778986] env[62522]: DEBUG nova.virt.hardware [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 999.779237] env[62522]: DEBUG nova.virt.hardware [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] 
Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 999.779380] env[62522]: DEBUG nova.virt.hardware [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 999.779577] env[62522]: DEBUG nova.virt.hardware [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 999.780536] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e70b3e0-bcf3-4844-9431-9f228929df6e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.790469] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd08236-7735-4c5b-8778-eff02f25bb33 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.997498] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415882, 'name': CreateVM_Task, 'duration_secs': 0.331729} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.997675] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 999.998398] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.999255] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.999255] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 999.999255] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-643bcb70-6d50-428c-a5d6-968db4f6ddf0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.003923] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Waiting for the task: (returnval){ [ 1000.003923] env[62522]: value = 
"session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5255c0de-2050-70e4-b0d9-7d573cd3698c" [ 1000.003923] env[62522]: _type = "Task" [ 1000.003923] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.011786] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5255c0de-2050-70e4-b0d9-7d573cd3698c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.037570] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c59845e9-5e10-4636-9490-65abd9c48614 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1000.038071] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d5216d79-de7f-490b-b747-d7011b4b9de1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.043978] env[62522]: DEBUG oslo_vmware.api [None req-c59845e9-5e10-4636-9490-65abd9c48614 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1000.043978] env[62522]: value = "task-2415883" [ 1000.043978] env[62522]: _type = "Task" [ 1000.043978] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.054404] env[62522]: DEBUG oslo_vmware.api [None req-c59845e9-5e10-4636-9490-65abd9c48614 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2415883, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.111221] env[62522]: DEBUG nova.network.neutron [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Successfully updated port: cb27129b-6e1b-4340-8b38-2b33a9c6c83c {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1000.158214] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.433s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.158214] env[62522]: DEBUG nova.compute.manager [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1000.159102] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.735s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.159455] env[62522]: DEBUG nova.objects.instance [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lazy-loading 'resources' on Instance uuid 879354d3-7423-41e2-93f6-0d8d3a120170 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1000.213582] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415879, 'name': ReconfigVM_Task, 'duration_secs': 0.942141} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.214114] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Reconfigured VM instance instance-0000004f to attach disk [datastore2] 548364e9-b19a-4777-8e62-19b8a0594f36/548364e9-b19a-4777-8e62-19b8a0594f36.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1000.215330] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e9d397a8-8bb8-4a3d-abdb-1fa2e4be5766 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Releasing lock "refresh_cache-917469c5-20be-4814-814f-a042415be021" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.215633] env[62522]: DEBUG nova.objects.instance [None req-e9d397a8-8bb8-4a3d-abdb-1fa2e4be5766 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lazy-loading 'migration_context' on Instance uuid 917469c5-20be-4814-814f-a042415be021 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1000.216912] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7a1728a3-5638-4f0a-83da-de1796da5e15 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.226719] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1000.226719] env[62522]: value = "task-2415884" [ 1000.226719] env[62522]: _type = "Task" [ 1000.226719] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.237804] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415884, 'name': Rename_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.327267] env[62522]: DEBUG nova.network.neutron [req-20bfbd5a-8a61-49ee-85e5-4d818b307d6a req-1960735b-7224-4a24-a477-be68a71970fa service nova] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Updated VIF entry in instance network info cache for port ca82312a-dff9-4d56-af90-21b3984f4146. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1000.327669] env[62522]: DEBUG nova.network.neutron [req-20bfbd5a-8a61-49ee-85e5-4d818b307d6a req-1960735b-7224-4a24-a477-be68a71970fa service nova] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Updating instance_info_cache with network_info: [{"id": "ca82312a-dff9-4d56-af90-21b3984f4146", "address": "fa:16:3e:da:7e:3a", "network": {"id": "23abb62b-e824-40be-9189-f52dff8f7152", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-184467061-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "668fdbcae4a643a6ab24d1e616a637cc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ce17e10e-2fb0-4191-afee-e2b89fa15074", "external-id": "nsx-vlan-transportzone-352", "segmentation_id": 352, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca82312a-df", "ovs_interfaceid": "ca82312a-dff9-4d56-af90-21b3984f4146", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.478127] env[62522]: DEBUG nova.compute.manager [req-f161f1eb-f72f-4091-8bc8-ce2b6f69c0f5 req-85cd8125-819e-41bc-b041-2128b464045f service nova] [instance: 02708991-7f71-408e-89d8-932b845553d1] Received event network-vif-plugged-cb27129b-6e1b-4340-8b38-2b33a9c6c83c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1000.478127] env[62522]: DEBUG oslo_concurrency.lockutils [req-f161f1eb-f72f-4091-8bc8-ce2b6f69c0f5 req-85cd8125-819e-41bc-b041-2128b464045f service nova] Acquiring lock "02708991-7f71-408e-89d8-932b845553d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.478127] env[62522]: DEBUG oslo_concurrency.lockutils [req-f161f1eb-f72f-4091-8bc8-ce2b6f69c0f5 req-85cd8125-819e-41bc-b041-2128b464045f service nova] Lock "02708991-7f71-408e-89d8-932b845553d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.478127] env[62522]: DEBUG oslo_concurrency.lockutils [req-f161f1eb-f72f-4091-8bc8-ce2b6f69c0f5 req-85cd8125-819e-41bc-b041-2128b464045f service nova] Lock "02708991-7f71-408e-89d8-932b845553d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.478127] env[62522]: DEBUG nova.compute.manager [req-f161f1eb-f72f-4091-8bc8-ce2b6f69c0f5 req-85cd8125-819e-41bc-b041-2128b464045f service nova] [instance: 02708991-7f71-408e-89d8-932b845553d1] No waiting events found dispatching network-vif-plugged-cb27129b-6e1b-4340-8b38-2b33a9c6c83c {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1000.478326] env[62522]: WARNING nova.compute.manager [req-f161f1eb-f72f-4091-8bc8-ce2b6f69c0f5 req-85cd8125-819e-41bc-b041-2128b464045f service nova] [instance: 02708991-7f71-408e-89d8-932b845553d1] Received unexpected event network-vif-plugged-cb27129b-6e1b-4340-8b38-2b33a9c6c83c for instance with vm_state building and task_state spawning. [ 1000.478433] env[62522]: DEBUG nova.compute.manager [req-f161f1eb-f72f-4091-8bc8-ce2b6f69c0f5 req-85cd8125-819e-41bc-b041-2128b464045f service nova] [instance: 02708991-7f71-408e-89d8-932b845553d1] Received event network-changed-cb27129b-6e1b-4340-8b38-2b33a9c6c83c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1000.478597] env[62522]: DEBUG nova.compute.manager [req-f161f1eb-f72f-4091-8bc8-ce2b6f69c0f5 req-85cd8125-819e-41bc-b041-2128b464045f service nova] [instance: 02708991-7f71-408e-89d8-932b845553d1] Refreshing instance network info cache due to event network-changed-cb27129b-6e1b-4340-8b38-2b33a9c6c83c. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1000.478823] env[62522]: DEBUG oslo_concurrency.lockutils [req-f161f1eb-f72f-4091-8bc8-ce2b6f69c0f5 req-85cd8125-819e-41bc-b041-2128b464045f service nova] Acquiring lock "refresh_cache-02708991-7f71-408e-89d8-932b845553d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.478973] env[62522]: DEBUG oslo_concurrency.lockutils [req-f161f1eb-f72f-4091-8bc8-ce2b6f69c0f5 req-85cd8125-819e-41bc-b041-2128b464045f service nova] Acquired lock "refresh_cache-02708991-7f71-408e-89d8-932b845553d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.479127] env[62522]: DEBUG nova.network.neutron [req-f161f1eb-f72f-4091-8bc8-ce2b6f69c0f5 req-85cd8125-819e-41bc-b041-2128b464045f service nova] [instance: 02708991-7f71-408e-89d8-932b845553d1] Refreshing network info cache for port cb27129b-6e1b-4340-8b38-2b33a9c6c83c {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1000.514651] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5255c0de-2050-70e4-b0d9-7d573cd3698c, 'name': SearchDatastore_Task, 'duration_secs': 0.009062} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.515019] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.515367] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1000.515609] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.515808] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.516044] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1000.516298] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7185688d-a2fe-46e8-93e6-1065c0e6fe94 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.524232] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1000.524407] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1000.525129] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-259957e6-4ef2-47a5-93ea-3f20f316d304 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.529937] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Waiting for the task: (returnval){ [ 1000.529937] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52606598-b60b-3c47-8b87-fbe0cb8238e6" [ 1000.529937] env[62522]: _type = "Task" [ 1000.529937] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.537363] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52606598-b60b-3c47-8b87-fbe0cb8238e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.552136] env[62522]: DEBUG oslo_vmware.api [None req-c59845e9-5e10-4636-9490-65abd9c48614 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2415883, 'name': PowerOffVM_Task, 'duration_secs': 0.221699} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.552390] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c59845e9-5e10-4636-9490-65abd9c48614 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1000.552587] env[62522]: DEBUG nova.compute.manager [None req-c59845e9-5e10-4636-9490-65abd9c48614 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1000.553383] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c4f1aa-d232-4202-aa77-a9b2e9d8a8ce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.620143] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "refresh_cache-02708991-7f71-408e-89d8-932b845553d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.663268] env[62522]: DEBUG nova.compute.utils [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1000.664739] env[62522]: DEBUG nova.compute.manager [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 
4e27a87c-4891-4e69-a6fa-312b026bf11e] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1000.664847] env[62522]: DEBUG nova.network.neutron [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1000.718033] env[62522]: DEBUG nova.policy [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3549d85b612044969af8fda179d169ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61314d3f0b9e4c368312e714a953e549', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1000.719870] env[62522]: DEBUG nova.objects.base [None req-e9d397a8-8bb8-4a3d-abdb-1fa2e4be5766 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Object Instance<917469c5-20be-4814-814f-a042415be021> lazy-loaded attributes: info_cache,migration_context {{(pid=62522) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1000.720834] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07f2096d-4711-4adf-bff7-34f1330bf819 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.747331] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5a25ee2-16fc-4dfb-afc1-969b4965116c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.752800] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415884, 'name': Rename_Task, 'duration_secs': 0.264666} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.753648] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1000.753806] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4b7993dc-100b-49c5-b716-506894453e2c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.756270] env[62522]: DEBUG oslo_vmware.api [None req-e9d397a8-8bb8-4a3d-abdb-1fa2e4be5766 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1000.756270] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]523d1daf-e210-a3bf-fb54-0c5f3b2cef54" [ 1000.756270] env[62522]: _type = "Task" [ 1000.756270] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.763021] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1000.763021] env[62522]: value = "task-2415885" [ 1000.763021] env[62522]: _type = "Task" [ 1000.763021] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.767673] env[62522]: DEBUG oslo_vmware.api [None req-e9d397a8-8bb8-4a3d-abdb-1fa2e4be5766 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]523d1daf-e210-a3bf-fb54-0c5f3b2cef54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.774780] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415885, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.831216] env[62522]: DEBUG oslo_concurrency.lockutils [req-20bfbd5a-8a61-49ee-85e5-4d818b307d6a req-1960735b-7224-4a24-a477-be68a71970fa service nova] Releasing lock "refresh_cache-895e6716-44cf-45b2-afd8-eaba71c32460" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.999240] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff51dd7f-c469-4d52-882b-c95292b08763 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.007735] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-205facf2-5f27-4f51-8a12-9a2490ebf0da {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.012270] env[62522]: DEBUG nova.network.neutron [req-f161f1eb-f72f-4091-8bc8-ce2b6f69c0f5 req-85cd8125-819e-41bc-b041-2128b464045f service nova] [instance: 02708991-7f71-408e-89d8-932b845553d1] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1001.054139] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e42790f-a238-47be-803c-19d307c283aa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.060064] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52606598-b60b-3c47-8b87-fbe0cb8238e6, 'name': SearchDatastore_Task, 'duration_secs': 0.008424} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.064833] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9195d4d-3dc9-4561-a016-7eb6cec2939f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.067881] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6239bd08-94b5-4ab9-bb6f-f4972d58e5b2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.073042] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c59845e9-5e10-4636-9490-65abd9c48614 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "3c4c395c-0625-4569-990d-e2d4ad162c14" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.056s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.076528] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Waiting for the task: (returnval){ [ 1001.076528] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520eb418-a2e2-265c-d35a-6ff504bab526" [ 1001.076528] env[62522]: _type = "Task" [ 1001.076528] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.084895] env[62522]: DEBUG nova.compute.provider_tree [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1001.096494] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520eb418-a2e2-265c-d35a-6ff504bab526, 'name': SearchDatastore_Task, 'duration_secs': 0.010737} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.096906] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.097284] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 895e6716-44cf-45b2-afd8-eaba71c32460/895e6716-44cf-45b2-afd8-eaba71c32460.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1001.097436] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ece5c081-6bfe-42ee-a636-a2354d748dd6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.108738] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Waiting for the task: (returnval){ [ 1001.108738] env[62522]: value = "task-2415886" [ 1001.108738] env[62522]: _type = "Task" [ 1001.108738] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.119157] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Task: {'id': task-2415886, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.148016] env[62522]: DEBUG nova.network.neutron [req-f161f1eb-f72f-4091-8bc8-ce2b6f69c0f5 req-85cd8125-819e-41bc-b041-2128b464045f service nova] [instance: 02708991-7f71-408e-89d8-932b845553d1] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.169439] env[62522]: DEBUG nova.compute.manager [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1001.236882] env[62522]: DEBUG nova.network.neutron [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Successfully created port: f98850e9-37f2-496a-8b2e-590c3d8b7f5e {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1001.268690] env[62522]: DEBUG oslo_vmware.api [None req-e9d397a8-8bb8-4a3d-abdb-1fa2e4be5766 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]523d1daf-e210-a3bf-fb54-0c5f3b2cef54, 'name': SearchDatastore_Task, 'duration_secs': 0.016764} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.269383] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e9d397a8-8bb8-4a3d-abdb-1fa2e4be5766 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.274153] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415885, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.591474] env[62522]: DEBUG nova.scheduler.client.report [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1001.613337] env[62522]: DEBUG nova.objects.instance [None req-93577fda-8e37-4180-a21b-9a60301a2d39 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lazy-loading 'flavor' on Instance uuid 3c4c395c-0625-4569-990d-e2d4ad162c14 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1001.626970] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Task: {'id': task-2415886, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.650190] env[62522]: DEBUG oslo_concurrency.lockutils [req-f161f1eb-f72f-4091-8bc8-ce2b6f69c0f5 req-85cd8125-819e-41bc-b041-2128b464045f service nova] Releasing lock "refresh_cache-02708991-7f71-408e-89d8-932b845553d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.651385] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired lock "refresh_cache-02708991-7f71-408e-89d8-932b845553d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.651631] env[62522]: DEBUG nova.network.neutron [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1001.773824] env[62522]: DEBUG oslo_vmware.api [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415885, 'name': PowerOnVM_Task, 'duration_secs': 0.654812} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.774524] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1001.774863] env[62522]: INFO nova.compute.manager [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Took 7.86 seconds to spawn the instance on the hypervisor. [ 1001.775161] env[62522]: DEBUG nova.compute.manager [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1001.776076] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e81d5e8e-c9d2-4bda-9e4e-20f442db7e88 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.101337] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.942s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.104431] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.048s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.104689] env[62522]: DEBUG nova.objects.instance [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Lazy-loading 'resources' on Instance uuid ec2d78cf-15f9-441b-9800-8fcc513f7774 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1002.123659] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Task: {'id': task-2415886, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.124439] env[62522]: DEBUG oslo_concurrency.lockutils [None req-93577fda-8e37-4180-a21b-9a60301a2d39 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "refresh_cache-3c4c395c-0625-4569-990d-e2d4ad162c14" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.124602] env[62522]: DEBUG oslo_concurrency.lockutils [None req-93577fda-8e37-4180-a21b-9a60301a2d39 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquired lock "refresh_cache-3c4c395c-0625-4569-990d-e2d4ad162c14" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.124773] env[62522]: DEBUG nova.network.neutron [None req-93577fda-8e37-4180-a21b-9a60301a2d39 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1002.124955] env[62522]: DEBUG nova.objects.instance [None req-93577fda-8e37-4180-a21b-9a60301a2d39 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lazy-loading 'info_cache' on Instance uuid 3c4c395c-0625-4569-990d-e2d4ad162c14 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1002.126715] env[62522]: INFO nova.scheduler.client.report [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Deleted allocations for instance 879354d3-7423-41e2-93f6-0d8d3a120170 [ 1002.175975] env[62522]: DEBUG nova.compute.manager [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1002.184695] env[62522]: DEBUG nova.network.neutron [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1002.201804] env[62522]: DEBUG nova.virt.hardware [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='1c753bba3b4a95ff9bbe22700c2647fd',container_format='bare',created_at=2025-02-10T12:24:45Z,direct_url=,disk_format='vmdk',id=1f48c333-83a0-4c83-a8d3-3650d8a5edd4,min_disk=1,min_ram=0,name='tempest-test-snap-1304662379',owner='61314d3f0b9e4c368312e714a953e549',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-02-10T12:25:00Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1002.202062] env[62522]: DEBUG nova.virt.hardware [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1002.202224] env[62522]: DEBUG nova.virt.hardware [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1002.202405] env[62522]: DEBUG nova.virt.hardware [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1002.202551] env[62522]: DEBUG nova.virt.hardware [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1002.202694] env[62522]: DEBUG nova.virt.hardware [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1002.202896] env[62522]: DEBUG nova.virt.hardware [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1002.203071] env[62522]: DEBUG nova.virt.hardware [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1002.203287] env[62522]: DEBUG nova.virt.hardware [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Got 1 possible 
topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1002.203456] env[62522]: DEBUG nova.virt.hardware [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1002.203626] env[62522]: DEBUG nova.virt.hardware [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1002.204478] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd20507-7ee2-43d7-adba-a1f668d4c2a3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.214174] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30f72e9d-23e5-4228-9d13-3312acf7836e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.292128] env[62522]: INFO nova.compute.manager [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Took 20.38 seconds to build instance. [ 1002.334532] env[62522]: DEBUG nova.network.neutron [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Updating instance_info_cache with network_info: [{"id": "cb27129b-6e1b-4340-8b38-2b33a9c6c83c", "address": "fa:16:3e:53:d2:a1", "network": {"id": "2037da36-cd13-4cb1-95f4-08d1e174336c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1895994075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00b27498c07344d1bf9cecefa0fca033", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb27129b-6e", "ovs_interfaceid": "cb27129b-6e1b-4340-8b38-2b33a9c6c83c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.624307] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Task: {'id': task-2415886, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.629820] env[62522]: DEBUG nova.objects.base [None req-93577fda-8e37-4180-a21b-9a60301a2d39 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Object Instance<3c4c395c-0625-4569-990d-e2d4ad162c14> lazy-loaded attributes: flavor,info_cache {{(pid=62522) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1002.637737] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fb8d3cb9-81b7-4846-b6e0-3e761629b859 tempest-MigrationsAdminTest-1135262744 tempest-MigrationsAdminTest-1135262744-project-member] Lock "879354d3-7423-41e2-93f6-0d8d3a120170" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.711s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.794779] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8f3cf7ab-25b8-46ba-9cf9-6f2f8bf38f6e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Lock "548364e9-b19a-4777-8e62-19b8a0594f36" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.900s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.836641] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Releasing lock "refresh_cache-02708991-7f71-408e-89d8-932b845553d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1002.837048] env[62522]: DEBUG nova.compute.manager [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Instance network_info: |[{"id": "cb27129b-6e1b-4340-8b38-2b33a9c6c83c", "address": "fa:16:3e:53:d2:a1", "network": {"id": "2037da36-cd13-4cb1-95f4-08d1e174336c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1895994075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00b27498c07344d1bf9cecefa0fca033", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb27129b-6e", "ovs_interfaceid": "cb27129b-6e1b-4340-8b38-2b33a9c6c83c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1002.837352] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Instance VIF info [{'network_name': 
'br-int', 'mac_address': 'fa:16:3e:53:d2:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f65996a3-f865-4492-9377-cd14ec8b3aae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cb27129b-6e1b-4340-8b38-2b33a9c6c83c', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1002.844944] env[62522]: DEBUG oslo.service.loopingcall [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1002.847822] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02708991-7f71-408e-89d8-932b845553d1] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1002.848235] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4842492-a7fe-4014-b65c-02655002d5db {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.883413] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1002.883413] env[62522]: value = "task-2415887" [ 1002.883413] env[62522]: _type = "Task" [ 1002.883413] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.894629] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415887, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.919068] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3445f8e-b273-4da1-9b17-7ca99d2dd7b9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.927310] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2896f9-05dc-495c-bf85-374ae308220b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.962553] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4361503-55ef-4a2f-a5e9-06f9d4aee61b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.971368] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edb4872f-ffe8-4190-a704-ae8c48b54527 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.986230] env[62522]: DEBUG nova.compute.provider_tree [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1003.124316] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Task: {'id': task-2415886, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.666308} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.124452] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 895e6716-44cf-45b2-afd8-eaba71c32460/895e6716-44cf-45b2-afd8-eaba71c32460.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1003.124668] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1003.124913] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a2147f92-a853-481e-8eeb-670f7bf8c8e3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.136601] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Waiting for the task: (returnval){ [ 1003.136601] env[62522]: value = "task-2415888" [ 1003.136601] env[62522]: _type = "Task" [ 1003.136601] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.145954] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Task: {'id': task-2415888, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.219739] env[62522]: DEBUG nova.compute.manager [req-2fcea1b9-1dff-4f0a-ad61-ecb03537d092 req-9ca4d3ea-f9c8-422f-b91c-17c880d5b0b2 service nova] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Received event network-vif-plugged-f98850e9-37f2-496a-8b2e-590c3d8b7f5e {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1003.219959] env[62522]: DEBUG oslo_concurrency.lockutils [req-2fcea1b9-1dff-4f0a-ad61-ecb03537d092 req-9ca4d3ea-f9c8-422f-b91c-17c880d5b0b2 service nova] Acquiring lock "4e27a87c-4891-4e69-a6fa-312b026bf11e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.220221] env[62522]: DEBUG oslo_concurrency.lockutils [req-2fcea1b9-1dff-4f0a-ad61-ecb03537d092 req-9ca4d3ea-f9c8-422f-b91c-17c880d5b0b2 service nova] Lock "4e27a87c-4891-4e69-a6fa-312b026bf11e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.220393] env[62522]: DEBUG oslo_concurrency.lockutils [req-2fcea1b9-1dff-4f0a-ad61-ecb03537d092 req-9ca4d3ea-f9c8-422f-b91c-17c880d5b0b2 service nova] Lock "4e27a87c-4891-4e69-a6fa-312b026bf11e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.220567] env[62522]: DEBUG nova.compute.manager [req-2fcea1b9-1dff-4f0a-ad61-ecb03537d092 req-9ca4d3ea-f9c8-422f-b91c-17c880d5b0b2 service nova] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] No waiting events found dispatching network-vif-plugged-f98850e9-37f2-496a-8b2e-590c3d8b7f5e {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1003.220772] env[62522]: WARNING nova.compute.manager [req-2fcea1b9-1dff-4f0a-ad61-ecb03537d092 req-9ca4d3ea-f9c8-422f-b91c-17c880d5b0b2 service nova] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Received unexpected event network-vif-plugged-f98850e9-37f2-496a-8b2e-590c3d8b7f5e for instance with vm_state building and task_state spawning. [ 1003.306098] env[62522]: DEBUG nova.network.neutron [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Successfully updated port: f98850e9-37f2-496a-8b2e-590c3d8b7f5e {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1003.397247] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415887, 'name': CreateVM_Task, 'duration_secs': 0.50147} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.397247] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02708991-7f71-408e-89d8-932b845553d1] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1003.397687] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.397843] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.398166] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1003.398406] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e84cc87-6576-4355-88e7-49e01894cd81 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.405797] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1003.405797] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520005aa-3795-7b2c-7b19-d6bdccf92ece" [ 1003.405797] env[62522]: _type = "Task" [ 1003.405797] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.412612] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520005aa-3795-7b2c-7b19-d6bdccf92ece, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.461823] env[62522]: DEBUG nova.network.neutron [None req-93577fda-8e37-4180-a21b-9a60301a2d39 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Updating instance_info_cache with network_info: [{"id": "1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f", "address": "fa:16:3e:41:5d:d8", "network": {"id": "c3450427-ea7e-4a07-8399-53265d390e06", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1613138323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.174", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "686854cd52ce4809a4d315275260da54", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c42bb08a-77b4-4bba-8166-702cbb1b5f1e", "external-id": "nsx-vlan-transportzone-137", "segmentation_id": 137, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b7d6d1b-0d", "ovs_interfaceid": "1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.502326] env[62522]: INFO nova.compute.manager [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Rescuing [ 1003.503335] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquiring lock "refresh_cache-548364e9-b19a-4777-8e62-19b8a0594f36" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.503560] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquired lock "refresh_cache-548364e9-b19a-4777-8e62-19b8a0594f36" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.503741] env[62522]: DEBUG nova.network.neutron [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1003.523592] env[62522]: DEBUG nova.scheduler.client.report [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Updated inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with generation 111 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1003.523844] env[62522]: DEBUG nova.compute.provider_tree [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Updating resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 generation from 111 to 112 during operation: update_inventory {{(pid=62522) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1003.524092] env[62522]: DEBUG nova.compute.provider_tree [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1003.646760] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Task: {'id': task-2415888, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070556} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.647041] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1003.648032] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-010155ad-72d2-4d71-a4f9-7912341bfbdc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.672777] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] 895e6716-44cf-45b2-afd8-eaba71c32460/895e6716-44cf-45b2-afd8-eaba71c32460.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1003.672857] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b47d9c56-f853-447d-96d6-9514fbbc91fa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.694292] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Waiting for the task: (returnval){ [ 1003.694292] env[62522]: value = "task-2415889" [ 1003.694292] env[62522]: _type = "Task" [ 1003.694292] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.702943] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Task: {'id': task-2415889, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.810703] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "refresh_cache-4e27a87c-4891-4e69-a6fa-312b026bf11e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.811023] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquired lock "refresh_cache-4e27a87c-4891-4e69-a6fa-312b026bf11e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.811095] env[62522]: DEBUG nova.network.neutron [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1003.918027] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520005aa-3795-7b2c-7b19-d6bdccf92ece, 'name': SearchDatastore_Task, 'duration_secs': 0.010645} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.918027] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1003.918027] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1003.918027] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.918027] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.918027] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1003.918027] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4cfb2ede-fd73-4e7a-ba8a-d96787b8ec90 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.927389] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1003.927876] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1003.928821] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e093098-3a3c-40f9-b39e-8c95f5facda8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.936024] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1003.936024] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e4f6d5-7e38-0971-445a-12877508f81c" [ 1003.936024] env[62522]: _type = "Task" [ 1003.936024] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.945990] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e4f6d5-7e38-0971-445a-12877508f81c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.964864] env[62522]: DEBUG oslo_concurrency.lockutils [None req-93577fda-8e37-4180-a21b-9a60301a2d39 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Releasing lock "refresh_cache-3c4c395c-0625-4569-990d-e2d4ad162c14" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.028639] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.925s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.031259] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e9d397a8-8bb8-4a3d-abdb-1fa2e4be5766 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.762s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1004.055958] env[62522]: INFO nova.scheduler.client.report [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Deleted allocations for instance ec2d78cf-15f9-441b-9800-8fcc513f7774 [ 1004.206256] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Task: {'id': task-2415889, 'name': ReconfigVM_Task, 'duration_secs': 0.286141} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.207018] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Reconfigured VM instance instance-00000050 to attach disk [datastore2] 895e6716-44cf-45b2-afd8-eaba71c32460/895e6716-44cf-45b2-afd8-eaba71c32460.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1004.207411] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b1e95081-754d-492a-a19c-4ed457458464 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.216729] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Waiting for the task: (returnval){ [ 1004.216729] env[62522]: value = "task-2415890" [ 1004.216729] env[62522]: _type = "Task" [ 1004.216729] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.221307] env[62522]: DEBUG nova.network.neutron [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Updating instance_info_cache with network_info: [{"id": "47809969-d413-4587-acbe-3071b4ded420", "address": "fa:16:3e:d8:1d:76", "network": {"id": "4b04f6e1-0714-469b-9941-be6f5b6128d0", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-292476225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e1ca5dfb8f2d4b2e932679e017fe8b3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47809969-d4", "ovs_interfaceid": "47809969-d413-4587-acbe-3071b4ded420", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.229141] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Task: {'id': task-2415890, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.365296] env[62522]: DEBUG nova.network.neutron [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1004.449826] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e4f6d5-7e38-0971-445a-12877508f81c, 'name': SearchDatastore_Task, 'duration_secs': 0.011111} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.450696] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62432f03-0e48-40b5-8579-1b442082541d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.457853] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1004.457853] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fcf327-9956-1b73-a0b5-cfe81fe44665" [ 1004.457853] env[62522]: _type = "Task" [ 1004.457853] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.469040] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fcf327-9956-1b73-a0b5-cfe81fe44665, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.566734] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2376ba3a-5233-494b-9ea0-f3e7129115c8 tempest-InstanceActionsNegativeTestJSON-1162911603 tempest-InstanceActionsNegativeTestJSON-1162911603-project-member] Lock "ec2d78cf-15f9-441b-9800-8fcc513f7774" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.024s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.589317] env[62522]: DEBUG nova.network.neutron [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Updating instance_info_cache with network_info: [{"id": "f98850e9-37f2-496a-8b2e-590c3d8b7f5e", "address": "fa:16:3e:ac:ab:68", "network": {"id": "d6a06fb0-929f-44b6-93c4-698be8498194", "bridge": "br-int", "label": "tempest-ImagesTestJSON-272550236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61314d3f0b9e4c368312e714a953e549", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf98850e9-37", "ovs_interfaceid": "f98850e9-37f2-496a-8b2e-590c3d8b7f5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.728168] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Releasing lock "refresh_cache-548364e9-b19a-4777-8e62-19b8a0594f36" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.735915] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Task: {'id': task-2415890, 'name': Rename_Task, 'duration_secs': 0.1631} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.735915] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1004.735915] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56867d54-0f37-47ba-ae63-09cc83e91fe7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.741444] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Waiting for the task: (returnval){ [ 1004.741444] env[62522]: value = "task-2415891" [ 1004.741444] env[62522]: _type = "Task" [ 1004.741444] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.752924] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Task: {'id': task-2415891, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.852437] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e2c8f0b-648a-4fd7-aae5-b69bd4b94e35 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.861682] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-902e3e9d-89c6-4637-a207-93d4ba4961d1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.658788] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-93577fda-8e37-4180-a21b-9a60301a2d39 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1005.659645] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Releasing lock "refresh_cache-4e27a87c-4891-4e69-a6fa-312b026bf11e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.659823] env[62522]: DEBUG nova.compute.manager [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Instance network_info: |[{"id": "f98850e9-37f2-496a-8b2e-590c3d8b7f5e", "address": "fa:16:3e:ac:ab:68", "network": {"id": "d6a06fb0-929f-44b6-93c4-698be8498194", "bridge": "br-int", "label": "tempest-ImagesTestJSON-272550236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61314d3f0b9e4c368312e714a953e549", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf98850e9-37", "ovs_interfaceid": "f98850e9-37f2-496a-8b2e-590c3d8b7f5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1005.660146] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquiring lock "04a9d357-d094-487b-8f09-2f7e0c35f0d7" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.660353] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "04a9d357-d094-487b-8f09-2f7e0c35f0d7" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.660511] env[62522]: INFO nova.compute.manager [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Shelving [ 1005.666081] env[62522]: DEBUG nova.compute.manager [req-4a62d197-1748-44cc-8b60-a911e73f7a0f req-4c78a734-431f-44a4-a9e2-9cbcabf9c353 service nova] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Received event network-changed-f98850e9-37f2-496a-8b2e-590c3d8b7f5e {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1005.666081] env[62522]: DEBUG nova.compute.manager [req-4a62d197-1748-44cc-8b60-a911e73f7a0f req-4c78a734-431f-44a4-a9e2-9cbcabf9c353 service nova] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Refreshing instance network info cache due to event network-changed-f98850e9-37f2-496a-8b2e-590c3d8b7f5e. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1005.666081] env[62522]: DEBUG oslo_concurrency.lockutils [req-4a62d197-1748-44cc-8b60-a911e73f7a0f req-4c78a734-431f-44a4-a9e2-9cbcabf9c353 service nova] Acquiring lock "refresh_cache-4e27a87c-4891-4e69-a6fa-312b026bf11e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1005.666081] env[62522]: DEBUG oslo_concurrency.lockutils [req-4a62d197-1748-44cc-8b60-a911e73f7a0f req-4c78a734-431f-44a4-a9e2-9cbcabf9c353 service nova] Acquired lock "refresh_cache-4e27a87c-4891-4e69-a6fa-312b026bf11e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.666081] env[62522]: DEBUG nova.network.neutron [req-4a62d197-1748-44cc-8b60-a911e73f7a0f req-4c78a734-431f-44a4-a9e2-9cbcabf9c353 service nova] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Refreshing network info cache for port f98850e9-37f2-496a-8b2e-590c3d8b7f5e {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1005.670178] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-435bfd68-4457-4605-8e31-659024999e11 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.670886] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:ab:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '399f3826-705c-45f7-9fe0-3a08a945151a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f98850e9-37f2-496a-8b2e-590c3d8b7f5e', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1005.678362] env[62522]: DEBUG oslo.service.loopingcall [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1005.682506] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11356ecd-0281-44a1-addc-04af3291267c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.685802] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1005.687304] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bd8a809d-f94b-4a3e-8e7c-eaea2dd3b12b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.709297] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fcf327-9956-1b73-a0b5-cfe81fe44665, 'name': SearchDatastore_Task, 'duration_secs': 0.010648} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.709585] env[62522]: DEBUG oslo_vmware.api [None req-93577fda-8e37-4180-a21b-9a60301a2d39 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1005.709585] env[62522]: value = "task-2415892" [ 1005.709585] env[62522]: _type = "Task" [ 1005.709585] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.714490] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.714759] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 02708991-7f71-408e-89d8-932b845553d1/02708991-7f71-408e-89d8-932b845553d1.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1005.715474] env[62522]: DEBUG oslo_vmware.api [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Task: {'id': task-2415891, 'name': PowerOnVM_Task, 'duration_secs': 0.488199} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.717259] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-374bb572-49af-4a15-9bc8-7fadd58c26f6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.722458] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1005.722608] env[62522]: INFO nova.compute.manager [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Took 8.43 seconds to spawn the instance on the hypervisor. 
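The spawn path traced in the records above (copy the cached VMDK, extend the root disk, ReconfigVM_Task to attach it, Rename_Task, then PowerOnVM_Task) is driven by oslo.vmware's session/task-polling idiom: each asynchronous vSphere call returns a Task moref that is polled until it completes, which is what the repeated "Waiting for the task" / "progress is N%" lines record. Below is a minimal sketch of that idiom only, not Nova's vmwareapi driver code; the host, credentials and VM moref value are placeholders, and it assumes a reachable vCenter.

```python
# Minimal sketch of the oslo.vmware task-polling idiom seen in the records above.
# Host, credentials and the VM moref value are illustrative placeholders.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vc.example.test', 'admin', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Build a moref for an existing VM (the value is a placeholder).
vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')

# PowerOnVM_Task returns a Task moref immediately; wait_for_task polls it
# (the "progress is N%" lines) until it reaches 'success' or raises on error.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)
```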
[ 1005.722788] env[62522]: DEBUG nova.compute.manager [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1005.723983] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d15f9be8-3235-40ae-95c8-f3d291585d0d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.729174] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb24198-762d-43cb-bf7f-380d36ee2013 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.734447] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1005.734447] env[62522]: value = "task-2415893" [ 1005.734447] env[62522]: _type = "Task" [ 1005.734447] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.754584] env[62522]: DEBUG nova.compute.provider_tree [None req-e9d397a8-8bb8-4a3d-abdb-1fa2e4be5766 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1005.759024] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1005.759024] env[62522]: value = "task-2415894" [ 1005.759024] env[62522]: _type = "Task" [ 1005.759024] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.759024] env[62522]: DEBUG oslo_vmware.api [None req-93577fda-8e37-4180-a21b-9a60301a2d39 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2415892, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.763600] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415893, 'name': CreateVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.771081] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415894, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.233411] env[62522]: DEBUG oslo_vmware.api [None req-93577fda-8e37-4180-a21b-9a60301a2d39 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2415892, 'name': PowerOnVM_Task} progress is 71%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.247031] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415893, 'name': CreateVM_Task, 'duration_secs': 0.471548} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.247207] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1006.247947] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f48c333-83a0-4c83-a8d3-3650d8a5edd4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1006.248125] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f48c333-83a0-4c83-a8d3-3650d8a5edd4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.249013] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f48c333-83a0-4c83-a8d3-3650d8a5edd4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1006.249649] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e17486a0-42dd-49ac-86de-61fcdba3c4e7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.256134] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1006.256134] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5221d13e-b234-3bf2-f9a7-202e5b35713d" [ 1006.256134] env[62522]: _type = "Task" [ 1006.256134] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.269028] env[62522]: DEBUG nova.scheduler.client.report [None req-e9d397a8-8bb8-4a3d-abdb-1fa2e4be5766 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1006.275817] env[62522]: INFO nova.compute.manager [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Took 20.29 seconds to build instance. 
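The inventory dictionaries logged here and at the earlier set_inventory_for_provider call feed Placement's capacity check, where the usable capacity of each resource class is (total - reserved) * allocation_ratio. A small worked check against the values logged for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 follows; this is a sketch of the arithmetic only, not Nova's reporting code.

```python
# Usable capacity per resource class as Placement computes it:
#   capacity = (total - reserved) * allocation_ratio
# Values taken from the inventory logged above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g}")

# Expected output:
#   VCPU: 192
#   MEMORY_MB: 196078
#   DISK_GB: 400
```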
[ 1006.288102] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f48c333-83a0-4c83-a8d3-3650d8a5edd4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1006.288102] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Processing image 1f48c333-83a0-4c83-a8d3-3650d8a5edd4 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1006.288102] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f48c333-83a0-4c83-a8d3-3650d8a5edd4/1f48c333-83a0-4c83-a8d3-3650d8a5edd4.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1006.288102] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f48c333-83a0-4c83-a8d3-3650d8a5edd4/1f48c333-83a0-4c83-a8d3-3650d8a5edd4.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.288102] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1006.288102] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415894, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.515506} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.288102] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-df240779-a029-4f8d-a64d-588f376f517d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.289555] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 02708991-7f71-408e-89d8-932b845553d1/02708991-7f71-408e-89d8-932b845553d1.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1006.289758] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1006.290449] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4bd41701-dfba-420f-b4a8-968b698b9e21 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.301762] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1006.301762] env[62522]: value = "task-2415895" [ 1006.301762] env[62522]: _type = "Task" [ 1006.301762] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.303753] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1006.303924] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1006.308443] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44652ea5-ac79-4c17-8da1-217f03226e60 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.317791] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415895, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.319282] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1006.319282] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]528ad028-e793-e0b0-63b7-cd7e765d0ae2" [ 1006.319282] env[62522]: _type = "Task" [ 1006.319282] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.329159] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]528ad028-e793-e0b0-63b7-cd7e765d0ae2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.557196] env[62522]: DEBUG nova.network.neutron [req-4a62d197-1748-44cc-8b60-a911e73f7a0f req-4c78a734-431f-44a4-a9e2-9cbcabf9c353 service nova] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Updated VIF entry in instance network info cache for port f98850e9-37f2-496a-8b2e-590c3d8b7f5e. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1006.557690] env[62522]: DEBUG nova.network.neutron [req-4a62d197-1748-44cc-8b60-a911e73f7a0f req-4c78a734-431f-44a4-a9e2-9cbcabf9c353 service nova] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Updating instance_info_cache with network_info: [{"id": "f98850e9-37f2-496a-8b2e-590c3d8b7f5e", "address": "fa:16:3e:ac:ab:68", "network": {"id": "d6a06fb0-929f-44b6-93c4-698be8498194", "bridge": "br-int", "label": "tempest-ImagesTestJSON-272550236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61314d3f0b9e4c368312e714a953e549", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf98850e9-37", "ovs_interfaceid": "f98850e9-37f2-496a-8b2e-590c3d8b7f5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.669435] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1006.669752] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a92bc4b-aae5-4805-bcc9-801ef639ac10 {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.678854] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1006.678854] env[62522]: value = "task-2415896" [ 1006.678854] env[62522]: _type = "Task" [ 1006.678854] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.692514] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415896, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.723527] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1006.727073] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f1c19f91-4745-481f-bdc0-376c6a0df22b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.731141] env[62522]: DEBUG oslo_vmware.api [None req-93577fda-8e37-4180-a21b-9a60301a2d39 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2415892, 'name': PowerOnVM_Task, 'duration_secs': 0.7328} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.731141] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-93577fda-8e37-4180-a21b-9a60301a2d39 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1006.731141] env[62522]: DEBUG nova.compute.manager [None req-93577fda-8e37-4180-a21b-9a60301a2d39 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1006.731141] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed6ac1d2-0e4f-4a4d-87c5-6611752d2654 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.735853] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 1006.735853] env[62522]: value = "task-2415897" [ 1006.735853] env[62522]: _type = "Task" [ 1006.735853] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.754397] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415897, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.786082] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0e0b0a27-b58e-4645-b66d-25c01d2560b5 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Lock "895e6716-44cf-45b2-afd8-eaba71c32460" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.817s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.815036] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415895, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.098513} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.816082] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1006.816936] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815dcc29-85b9-41b3-af95-361cc954128e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.851132] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 02708991-7f71-408e-89d8-932b845553d1/02708991-7f71-408e-89d8-932b845553d1.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1006.851132] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Preparing fetch location {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1006.851348] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Fetch image to [datastore2] OSTACK_IMG_2829f13c-6e6e-4211-b1e4-c48ee3729b0f/OSTACK_IMG_2829f13c-6e6e-4211-b1e4-c48ee3729b0f.vmdk {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1006.851527] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Downloading stream optimized image 
1f48c333-83a0-4c83-a8d3-3650d8a5edd4 to [datastore2] OSTACK_IMG_2829f13c-6e6e-4211-b1e4-c48ee3729b0f/OSTACK_IMG_2829f13c-6e6e-4211-b1e4-c48ee3729b0f.vmdk on the data store datastore2 as vApp {{(pid=62522) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1006.851692] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Downloading image file data 1f48c333-83a0-4c83-a8d3-3650d8a5edd4 to the ESX as VM named 'OSTACK_IMG_2829f13c-6e6e-4211-b1e4-c48ee3729b0f' {{(pid=62522) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1006.854017] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c3bbeb0-1395-4c4e-baec-56d629c48515 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.878683] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1006.878683] env[62522]: value = "task-2415898" [ 1006.878683] env[62522]: _type = "Task" [ 1006.878683] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.888646] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415898, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.957241] env[62522]: DEBUG oslo_vmware.rw_handles [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1006.957241] env[62522]: value = "resgroup-9" [ 1006.957241] env[62522]: _type = "ResourcePool" [ 1006.957241] env[62522]: }. {{(pid=62522) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1006.957241] env[62522]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-7e95c57c-332b-49d7-ad76-db1c123a1bdc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.980521] env[62522]: DEBUG oslo_vmware.rw_handles [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lease: (returnval){ [ 1006.980521] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52063c1e-cf88-3c4b-c5fe-37d811652a49" [ 1006.980521] env[62522]: _type = "HttpNfcLease" [ 1006.980521] env[62522]: } obtained for vApp import into resource pool (val){ [ 1006.980521] env[62522]: value = "resgroup-9" [ 1006.980521] env[62522]: _type = "ResourcePool" [ 1006.980521] env[62522]: }. 
{{(pid=62522) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1006.981106] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the lease: (returnval){ [ 1006.981106] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52063c1e-cf88-3c4b-c5fe-37d811652a49" [ 1006.981106] env[62522]: _type = "HttpNfcLease" [ 1006.981106] env[62522]: } to be ready. {{(pid=62522) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1006.991510] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1006.991510] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52063c1e-cf88-3c4b-c5fe-37d811652a49" [ 1006.991510] env[62522]: _type = "HttpNfcLease" [ 1006.991510] env[62522]: } is initializing. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1007.060894] env[62522]: DEBUG oslo_concurrency.lockutils [req-4a62d197-1748-44cc-8b60-a911e73f7a0f req-4c78a734-431f-44a4-a9e2-9cbcabf9c353 service nova] Releasing lock "refresh_cache-4e27a87c-4891-4e69-a6fa-312b026bf11e" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1007.191330] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415896, 'name': PowerOffVM_Task, 'duration_secs': 0.360249} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.191773] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1007.192599] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4659a907-a2d5-419a-b7d3-feed069f2bfc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.215681] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a5daa3a-caeb-4fbb-89dc-26ce602a1b44 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.253694] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415897, 'name': PowerOffVM_Task, 'duration_secs': 0.416091} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.257856] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1007.258304] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1007.262098] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7b9343be-16e8-42b5-a9f1-5c6eb7f587ea {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.262098] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8005f08-2ac3-4b0e-86f5-d73348128bc9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.284838] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a3385ed-4ad2-49cf-8363-3b32eb26aefc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.287696] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1007.287696] env[62522]: value = "task-2415900" [ 1007.287696] env[62522]: _type = "Task" [ 1007.287696] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.289024] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e9d397a8-8bb8-4a3d-abdb-1fa2e4be5766 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.258s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.309854] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] VM already powered off {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1007.309854] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1007.309854] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1007.309854] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.309854] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1007.309854] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-004cc9a4-83eb-4def-9b3c-1967f032b34e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.319618] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1007.319822] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1007.320598] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4fb7ef9-a890-4364-abde-79bdc11eb921 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.327826] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1007.327826] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e4b0f3-f2d8-a8fe-96ee-c4295ff4d940" [ 1007.327826] env[62522]: _type = "Task" [ 1007.327826] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.339317] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e4b0f3-f2d8-a8fe-96ee-c4295ff4d940, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.393754] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415898, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.492959] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1007.492959] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52063c1e-cf88-3c4b-c5fe-37d811652a49" [ 1007.492959] env[62522]: _type = "HttpNfcLease" [ 1007.492959] env[62522]: } is initializing. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1007.804447] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Creating Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1007.804868] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ce289ca5-21af-4ddc-9e9c-c314c9231725 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.814761] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 1007.814761] env[62522]: value = "task-2415901" [ 1007.814761] env[62522]: _type = "Task" [ 1007.814761] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.825502] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415901, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.838616] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e4b0f3-f2d8-a8fe-96ee-c4295ff4d940, 'name': SearchDatastore_Task, 'duration_secs': 0.019962} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.839238] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e741f122-b3c7-4bdc-9b7e-89c22dbd1cb8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.846558] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1007.846558] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f01a0b-c21c-4363-3a58-5e7ed80865d0" [ 1007.846558] env[62522]: _type = "Task" [ 1007.846558] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.852932] env[62522]: INFO nova.scheduler.client.report [None req-e9d397a8-8bb8-4a3d-abdb-1fa2e4be5766 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Deleted allocation for migration e03fc413-2c2e-4a7d-9c2e-0f29ad86ac13 [ 1007.863979] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f01a0b-c21c-4363-3a58-5e7ed80865d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.891606] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415898, 'name': ReconfigVM_Task, 'duration_secs': 0.684227} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.894707] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 02708991-7f71-408e-89d8-932b845553d1/02708991-7f71-408e-89d8-932b845553d1.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1007.894707] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f46262c0-4e17-42f1-9488-9504467f5b0e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.901053] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1007.901053] env[62522]: value = "task-2415902" [ 1007.901053] env[62522]: _type = "Task" [ 1007.901053] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.910735] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415902, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.995023] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1007.995023] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52063c1e-cf88-3c4b-c5fe-37d811652a49" [ 1007.995023] env[62522]: _type = "HttpNfcLease" [ 1007.995023] env[62522]: } is ready. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1007.995023] env[62522]: DEBUG oslo_vmware.rw_handles [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1007.995023] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52063c1e-cf88-3c4b-c5fe-37d811652a49" [ 1007.995023] env[62522]: _type = "HttpNfcLease" [ 1007.995023] env[62522]: }. {{(pid=62522) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1007.995023] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3471a400-6f49-46b9-b903-12b76236cff5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.002604] env[62522]: DEBUG oslo_vmware.rw_handles [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52716266-1f5b-e6f0-2313-04491754bcb6/disk-0.vmdk from lease info. 
{{(pid=62522) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1008.002795] env[62522]: DEBUG oslo_vmware.rw_handles [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52716266-1f5b-e6f0-2313-04491754bcb6/disk-0.vmdk. {{(pid=62522) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1008.077279] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-02868e04-39c5-4da3-a42e-605e361f1be0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.126317] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquiring lock "921c14c9-27fa-4eda-9831-6263ad0d6c57" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.126317] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "921c14c9-27fa-4eda-9831-6263ad0d6c57" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.165018] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquiring lock "4e9436df-c86b-429b-abc2-97f760858055" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.165276] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "4e9436df-c86b-429b-abc2-97f760858055" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.194240] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquiring lock "97f4c6ab-04de-4069-8ce0-1509c30ffb0f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.194472] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "97f4c6ab-04de-4069-8ce0-1509c30ffb0f" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.336027] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415901, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.358654] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e9d397a8-8bb8-4a3d-abdb-1fa2e4be5766 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "917469c5-20be-4814-814f-a042415be021" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 10.874s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.359895] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f01a0b-c21c-4363-3a58-5e7ed80865d0, 'name': SearchDatastore_Task, 'duration_secs': 0.014251} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.361751] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1008.362044] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 548364e9-b19a-4777-8e62-19b8a0594f36/2ee4561b-ba48-4f45-82f6-eac89be98290-rescue.vmdk. {{(pid=62522) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1008.362580] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4036bbf7-df13-41a9-b8a2-1e10b10f2882 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.373716] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1008.373716] env[62522]: value = "task-2415903" [ 1008.373716] env[62522]: _type = "Task" [ 1008.373716] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.385025] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415903, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.413084] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415902, 'name': Rename_Task, 'duration_secs': 0.258609} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.414958] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1008.415575] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-451f389c-15c7-4d15-bfa6-d849ef9247fd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.423323] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1008.423323] env[62522]: value = "task-2415904" [ 1008.423323] env[62522]: _type = "Task" [ 1008.423323] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.434135] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415904, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.629169] env[62522]: DEBUG nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1008.668102] env[62522]: DEBUG nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1008.697122] env[62522]: DEBUG nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1008.739552] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Acquiring lock "895e6716-44cf-45b2-afd8-eaba71c32460" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.739909] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Lock "895e6716-44cf-45b2-afd8-eaba71c32460" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.740224] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Acquiring lock "895e6716-44cf-45b2-afd8-eaba71c32460-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.740499] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Lock "895e6716-44cf-45b2-afd8-eaba71c32460-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.740750] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Lock "895e6716-44cf-45b2-afd8-eaba71c32460-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.743221] env[62522]: INFO nova.compute.manager [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Terminating instance [ 1008.834215] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415901, 'name': CreateSnapshot_Task, 'duration_secs': 0.712877} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.834215] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Created Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1008.834392] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6f17db-533a-495b-8d2b-451fc522f9de {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.870371] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a4e976bc-b245-4774-b252-3c78ae45b362 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "bf44e269-0297-473e-b6ce-04a40d0ec1b4" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.870764] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a4e976bc-b245-4774-b252-3c78ae45b362 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "bf44e269-0297-473e-b6ce-04a40d0ec1b4" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.889384] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415903, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.937947] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415904, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.950991] env[62522]: DEBUG oslo_vmware.rw_handles [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Completed reading data from the image iterator. {{(pid=62522) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1008.951432] env[62522]: DEBUG oslo_vmware.rw_handles [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52716266-1f5b-e6f0-2313-04491754bcb6/disk-0.vmdk. 
{{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1008.952414] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed543c8-1b65-4be8-8a10-d0c368989193 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.962159] env[62522]: DEBUG oslo_vmware.rw_handles [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52716266-1f5b-e6f0-2313-04491754bcb6/disk-0.vmdk is in state: ready. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1008.962159] env[62522]: DEBUG oslo_vmware.rw_handles [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52716266-1f5b-e6f0-2313-04491754bcb6/disk-0.vmdk. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1008.962467] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-99fd9840-47c3-4704-912b-fd44bab5f630 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.162689] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.163142] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.165175] env[62522]: INFO nova.compute.claims [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1009.190514] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.218375] env[62522]: DEBUG oslo_vmware.rw_handles [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52716266-1f5b-e6f0-2313-04491754bcb6/disk-0.vmdk. 
{{(pid=62522) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1009.218850] env[62522]: INFO nova.virt.vmwareapi.images [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Downloaded image file data 1f48c333-83a0-4c83-a8d3-3650d8a5edd4 [ 1009.220383] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b297dd-fb72-42f4-9f0d-54e60c2e7c56 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.224322] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.237785] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4bb38f48-d200-46bc-b9ee-ed9d6faa5203 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.249642] env[62522]: DEBUG nova.compute.manager [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1009.249850] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1009.250693] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f25902e4-1207-488d-9790-794fa9eb2749 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.258955] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1009.259241] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4f169a6e-c085-475d-9c8a-c30741317623 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.266550] env[62522]: DEBUG oslo_vmware.api [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Waiting for the task: (returnval){ [ 1009.266550] env[62522]: value = "task-2415906" [ 1009.266550] env[62522]: _type = "Task" [ 1009.266550] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.273839] env[62522]: INFO nova.virt.vmwareapi.images [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] The imported VM was unregistered [ 1009.276070] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Caching image {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1009.276355] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Creating directory with path [datastore2] devstack-image-cache_base/1f48c333-83a0-4c83-a8d3-3650d8a5edd4 {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1009.280429] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7dd5bf8f-9e3b-4245-ad16-65f4e5a02642 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.282827] env[62522]: DEBUG oslo_vmware.api [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Task: {'id': task-2415906, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.294665] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Created directory with path [datastore2] devstack-image-cache_base/1f48c333-83a0-4c83-a8d3-3650d8a5edd4 {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1009.294871] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_2829f13c-6e6e-4211-b1e4-c48ee3729b0f/OSTACK_IMG_2829f13c-6e6e-4211-b1e4-c48ee3729b0f.vmdk to [datastore2] devstack-image-cache_base/1f48c333-83a0-4c83-a8d3-3650d8a5edd4/1f48c333-83a0-4c83-a8d3-3650d8a5edd4.vmdk. {{(pid=62522) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1009.296263] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-2dea13c7-f189-4b79-8bcb-2482165652f2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.308547] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1009.308547] env[62522]: value = "task-2415907" [ 1009.308547] env[62522]: _type = "Task" [ 1009.308547] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.317913] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415907, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.361363] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Creating linked-clone VM from snapshot {{(pid=62522) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1009.362804] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-13d2d2c3-682f-4a7a-8e91-f9cf93616a9c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.375421] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 1009.375421] env[62522]: value = "task-2415908" [ 1009.375421] env[62522]: _type = "Task" [ 1009.375421] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.378884] env[62522]: DEBUG nova.compute.utils [None req-a4e976bc-b245-4774-b252-3c78ae45b362 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1009.389481] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415908, 'name': CloneVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.392979] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415903, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.617385} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.393279] env[62522]: INFO nova.virt.vmwareapi.ds_util [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 548364e9-b19a-4777-8e62-19b8a0594f36/2ee4561b-ba48-4f45-82f6-eac89be98290-rescue.vmdk. 
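The entries above trace the standard oslo.vmware call pattern behind each of these operations: nova issues a vCenter task (here CopyVirtualDisk_Task, via ds_util.disk_copy) through the shared VMwareAPISession, then blocks in wait_for_task while _poll_task emits the periodic progress lines seen throughout this capture. A minimal Python sketch of that pattern is shown below; the vCenter host, credentials and datastore paths are placeholders, not values taken from this log.

from oslo_vmware import api as vmware_api

# Establish the shared vCenter session (placeholder endpoint and credentials).
session = vmware_api.VMwareAPISession(
    'vc.example.org',                # vCenter host -- placeholder
    'administrator@vsphere.local',   # user -- placeholder
    'secret',                        # password -- placeholder
    10,                              # api_retry_count
    0.5)                             # task_poll_interval: seconds between _poll_task calls

# invoke_api() sends the SOAP request and returns a Task managed-object
# reference; wait_for_task() polls it until it reports success or raises.
disk_mgr = session.vim.service_content.virtualDiskManager
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName='[datastore2] devstack-image-cache_base/IMAGE_UUID/IMAGE_UUID.vmdk',
    destName='[datastore2] INSTANCE_UUID/IMAGE_UUID-rescue.vmdk')
session.wait_for_task(task)

The same session/invoke_api/wait_for_task sequence underlies the PowerOffVM_Task, ReconfigVM_Task, CreateSnapshot_Task and MoveVirtualDisk_Task entries in this section; only the invoked method and its arguments differ.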
[ 1009.394524] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42bd7ca5-4a34-40aa-93f4-0ec769cec182 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.425364] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] 548364e9-b19a-4777-8e62-19b8a0594f36/2ee4561b-ba48-4f45-82f6-eac89be98290-rescue.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1009.426094] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50377472-ea8c-4db9-b528-d6b5cc0e1036 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.451838] env[62522]: DEBUG oslo_vmware.api [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415904, 'name': PowerOnVM_Task, 'duration_secs': 1.007919} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.453508] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1009.453807] env[62522]: INFO nova.compute.manager [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Took 9.70 seconds to spawn the instance on the hypervisor. [ 1009.454095] env[62522]: DEBUG nova.compute.manager [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1009.454588] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1009.454588] env[62522]: value = "task-2415909" [ 1009.454588] env[62522]: _type = "Task" [ 1009.454588] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.455512] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f127b5c7-6a8e-4c13-b028-11d004e5b6b7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.471317] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415909, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.779575] env[62522]: DEBUG oslo_vmware.api [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Task: {'id': task-2415906, 'name': PowerOffVM_Task, 'duration_secs': 0.410465} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.779896] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1009.780170] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1009.780384] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c2dcb837-0dd0-4b92-8fae-8424d40a3c81 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.803092] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "917469c5-20be-4814-814f-a042415be021" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.803360] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "917469c5-20be-4814-814f-a042415be021" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.803617] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "917469c5-20be-4814-814f-a042415be021-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.803906] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "917469c5-20be-4814-814f-a042415be021-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.804014] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock 
"917469c5-20be-4814-814f-a042415be021-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.806358] env[62522]: INFO nova.compute.manager [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Terminating instance [ 1009.819429] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415907, 'name': MoveVirtualDisk_Task} progress is 15%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.887453] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a4e976bc-b245-4774-b252-3c78ae45b362 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "bf44e269-0297-473e-b6ce-04a40d0ec1b4" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.017s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.888444] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415908, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.901633] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1009.901984] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1009.901984] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Deleting the datastore file [datastore2] 895e6716-44cf-45b2-afd8-eaba71c32460 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1009.902250] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5fc5262f-f770-46d0-8ba0-de7e01fcb6e9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.911260] env[62522]: DEBUG oslo_vmware.api [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Waiting for the task: (returnval){ [ 1009.911260] env[62522]: value = "task-2415911" [ 1009.911260] env[62522]: _type = "Task" [ 1009.911260] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.920687] env[62522]: DEBUG oslo_vmware.api [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Task: {'id': task-2415911, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.969525] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415909, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.982961] env[62522]: INFO nova.compute.manager [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Took 21.50 seconds to build instance. [ 1010.293385] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "cabe40a0-8bd0-4d77-b949-298bd194fa42" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.293643] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "cabe40a0-8bd0-4d77-b949-298bd194fa42" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.317015] env[62522]: DEBUG nova.compute.manager [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1010.317285] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1010.318129] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a6e8e06-7c33-4d69-a828-a9810f95ee68 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.330059] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415907, 'name': MoveVirtualDisk_Task} progress is 38%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.332535] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1010.332924] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-28f1a6f3-4cce-4548-b940-6f0860269bef {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.344606] env[62522]: DEBUG oslo_vmware.api [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1010.344606] env[62522]: value = "task-2415912" [ 1010.344606] env[62522]: _type = "Task" [ 1010.344606] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.354412] env[62522]: DEBUG oslo_vmware.api [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415912, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.388644] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415908, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.425737] env[62522]: DEBUG oslo_vmware.api [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Task: {'id': task-2415911, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.472171] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415909, 'name': ReconfigVM_Task, 'duration_secs': 0.766014} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.474166] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Reconfigured VM instance instance-0000004f to attach disk [datastore2] 548364e9-b19a-4777-8e62-19b8a0594f36/2ee4561b-ba48-4f45-82f6-eac89be98290-rescue.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1010.481896] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a339ea78-6db5-4639-b7cf-9e51d2c23f7e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.486991] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e8b606a8-eb55-4658-98f5-ed7bc47c95a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "02708991-7f71-408e-89d8-932b845553d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.022s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.525221] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95abec35-a5a6-4522-8fa0-a14e16f7593f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.546111] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1010.546111] env[62522]: value = "task-2415913" [ 1010.546111] env[62522]: _type = "Task" [ 1010.546111] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.562234] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415913, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.601269] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a02b4f12-cf89-4f87-bd33-9ac6c6acb1e5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.613024] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f014d94d-3473-465e-932f-8d316b3d2705 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.648481] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd24390-8e53-4192-a85e-8f34b932ba54 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.657232] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca80788b-abfc-421a-afde-ed987257170a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.673205] env[62522]: DEBUG nova.compute.provider_tree [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1010.796140] env[62522]: DEBUG nova.compute.manager [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1010.823283] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415907, 'name': MoveVirtualDisk_Task} progress is 57%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.856788] env[62522]: DEBUG oslo_vmware.api [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415912, 'name': PowerOffVM_Task, 'duration_secs': 0.451268} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.857127] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1010.857307] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1010.857577] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d9691a12-0d1c-4e49-828e-c6781619dc2c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.887162] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415908, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.924160] env[62522]: DEBUG oslo_vmware.api [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Task: {'id': task-2415911, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.945482] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1010.945699] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1010.945828] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Deleting the datastore file [datastore2] 917469c5-20be-4814-814f-a042415be021 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1010.946135] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ce5408e-0f90-44ae-ba95-ea000675de2b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.953978] env[62522]: DEBUG oslo_vmware.api [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1010.953978] env[62522]: value = "task-2415915" [ 1010.953978] env[62522]: _type = "Task" [ 1010.953978] 
env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.963714] env[62522]: DEBUG oslo_vmware.api [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415915, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.979294] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a4e976bc-b245-4774-b252-3c78ae45b362 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "bf44e269-0297-473e-b6ce-04a40d0ec1b4" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.979634] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a4e976bc-b245-4774-b252-3c78ae45b362 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "bf44e269-0297-473e-b6ce-04a40d0ec1b4" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.980009] env[62522]: INFO nova.compute.manager [None req-a4e976bc-b245-4774-b252-3c78ae45b362 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Attaching volume 56b832bf-8626-456e-9706-070e3adf329d to /dev/sdb [ 1011.024072] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d3143f-9479-4e36-a96b-56971a00fbce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.034919] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34bde4b7-1610-44ee-b7ff-a6b4d8023f1f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.054866] env[62522]: DEBUG nova.virt.block_device [None req-a4e976bc-b245-4774-b252-3c78ae45b362 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Updating existing volume attachment record: 2e7b40de-13c6-42f2-85f3-92d2ff32ed40 {{(pid=62522) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1011.063852] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415913, 'name': ReconfigVM_Task, 'duration_secs': 0.482635} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.064229] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1011.064485] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a86b0023-28e2-4203-bcec-fdc70156f5f8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.073605] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1011.073605] env[62522]: value = "task-2415916" [ 1011.073605] env[62522]: _type = "Task" [ 1011.073605] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.082737] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415916, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.179598] env[62522]: DEBUG nova.scheduler.client.report [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1011.324100] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415907, 'name': MoveVirtualDisk_Task} progress is 77%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.325404] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.390461] env[62522]: DEBUG oslo_concurrency.lockutils [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "3b2cd0b6-0c7a-411c-a7f5-64835f2179dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.390461] env[62522]: DEBUG oslo_concurrency.lockutils [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "3b2cd0b6-0c7a-411c-a7f5-64835f2179dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.400451] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415908, 'name': CloneVM_Task} progress is 95%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.424404] env[62522]: DEBUG oslo_vmware.api [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Task: {'id': task-2415911, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.465551] env[62522]: DEBUG oslo_vmware.api [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415915, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.585434] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415916, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.686117] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.523s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.686878] env[62522]: DEBUG nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1011.689538] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.499s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.691018] env[62522]: INFO nova.compute.claims [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1011.824155] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415907, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.888549] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415908, 'name': CloneVM_Task, 'duration_secs': 2.302368} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.888815] env[62522]: INFO nova.virt.vmwareapi.vmops [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Created linked-clone VM from snapshot [ 1011.889619] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46206595-e7a3-4ac8-ba2b-165f28c188e9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.892381] env[62522]: DEBUG nova.compute.manager [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1011.901998] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Uploading image f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8 {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1011.923479] env[62522]: DEBUG oslo_vmware.api [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Task: {'id': task-2415911, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.924446} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.925440] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1011.925727] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1011.925916] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1011.926103] env[62522]: INFO nova.compute.manager [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Took 2.68 seconds to destroy the instance on the hypervisor. [ 1011.926351] env[62522]: DEBUG oslo.service.loopingcall [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1011.926761] env[62522]: DEBUG nova.compute.manager [-] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1011.926865] env[62522]: DEBUG nova.network.neutron [-] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1011.932160] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1011.932160] env[62522]: value = "vm-489787" [ 1011.932160] env[62522]: _type = "VirtualMachine" [ 1011.932160] env[62522]: }. 
{{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1011.932395] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-63cf8f14-94ce-493c-b671-ab4b09f386a1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.941030] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lease: (returnval){ [ 1011.941030] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52025df5-53d4-3632-6a9b-69e2757103ae" [ 1011.941030] env[62522]: _type = "HttpNfcLease" [ 1011.941030] env[62522]: } obtained for exporting VM: (result){ [ 1011.941030] env[62522]: value = "vm-489787" [ 1011.941030] env[62522]: _type = "VirtualMachine" [ 1011.941030] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1011.941406] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the lease: (returnval){ [ 1011.941406] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52025df5-53d4-3632-6a9b-69e2757103ae" [ 1011.941406] env[62522]: _type = "HttpNfcLease" [ 1011.941406] env[62522]: } to be ready. {{(pid=62522) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1011.949797] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1011.949797] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52025df5-53d4-3632-6a9b-69e2757103ae" [ 1011.949797] env[62522]: _type = "HttpNfcLease" [ 1011.949797] env[62522]: } is initializing. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1011.963427] env[62522]: DEBUG oslo_vmware.api [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415915, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.087693] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415916, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.198182] env[62522]: DEBUG nova.compute.utils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1012.199998] env[62522]: DEBUG nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1012.200187] env[62522]: DEBUG nova.network.neutron [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1012.206077] env[62522]: DEBUG nova.compute.manager [req-6ae8011d-9741-440d-8961-96aa5b7cd982 req-c439d7d6-0f10-45cd-90c2-8d58406b7740 service nova] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Received event network-vif-deleted-ca82312a-dff9-4d56-af90-21b3984f4146 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1012.206306] env[62522]: INFO nova.compute.manager [req-6ae8011d-9741-440d-8961-96aa5b7cd982 req-c439d7d6-0f10-45cd-90c2-8d58406b7740 service nova] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Neutron deleted interface ca82312a-dff9-4d56-af90-21b3984f4146; detaching it from the instance and deleting it from the info cache [ 1012.206482] env[62522]: DEBUG nova.network.neutron [req-6ae8011d-9741-440d-8961-96aa5b7cd982 req-c439d7d6-0f10-45cd-90c2-8d58406b7740 service nova] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.250819] env[62522]: DEBUG nova.policy [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a3d62ca8c3544b2ba6471af6a69a3d87', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'faee94ca03fc43149278a579ebe65682', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1012.325738] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415907, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.832759} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.327169] env[62522]: INFO nova.virt.vmwareapi.ds_util [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_2829f13c-6e6e-4211-b1e4-c48ee3729b0f/OSTACK_IMG_2829f13c-6e6e-4211-b1e4-c48ee3729b0f.vmdk to [datastore2] devstack-image-cache_base/1f48c333-83a0-4c83-a8d3-3650d8a5edd4/1f48c333-83a0-4c83-a8d3-3650d8a5edd4.vmdk. 
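The MoveVirtualDisk_Task entries above, together with the DeleteDatastoreFile_Task and CopyVirtualDisk_Task entries that follow, trace the VMware driver's image-cache pattern: the image is staged in a temporary OSTACK_IMG_* folder, promoted into devstack-image-cache_base/<image-id>/ while a lock on the cached VMDK path is held, the staging folder is deleted, and the cached VMDK is then copied into the instance's own folder. The Python below is a minimal illustrative sketch of that flow only, not Nova's actual code; the lock() context manager and every callable it takes (exists, move_disk, delete_file, copy_disk) are hypothetical stand-ins.

from contextlib import contextmanager

@contextmanager
def lock(name):
    # Stand-in for oslo_concurrency.lockutils-style locking keyed on the cached
    # VMDK path, e.g. "[datastore2] devstack-image-cache_base/<id>/<id>.vmdk".
    yield

def fetch_image_if_missing(cached_vmdk, staged_vmdk, staging_dir, instance_vmdk,
                           exists, move_disk, delete_file, copy_disk):
    """Illustrative cache-then-copy flow; all callables are caller-supplied."""
    with lock(cached_vmdk):
        if not exists(cached_vmdk):
            # MoveVirtualDisk_Task: promote the staged disk into the image cache.
            move_disk(staged_vmdk, cached_vmdk)
            # DeleteDatastoreFile_Task: drop the temporary OSTACK_IMG_* folder.
            delete_file(staging_dir)
    # CopyVirtualDisk_Task: each instance gets its own copy of the cached disk.
    copy_disk(cached_vmdk, instance_vmdk)

Keying the lock on the cached VMDK path mirrors the later 'Releasing lock "[datastore2] devstack-image-cache_base/..."' entry, so concurrent builds of the same image wait on the promotion instead of repeating it.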
[ 1012.327169] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Cleaning up location [datastore2] OSTACK_IMG_2829f13c-6e6e-4211-b1e4-c48ee3729b0f {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1012.327169] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_2829f13c-6e6e-4211-b1e4-c48ee3729b0f {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1012.327169] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-35af7a96-d7c4-4456-80f6-0596fb5b39ac {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.336681] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1012.336681] env[62522]: value = "task-2415921" [ 1012.336681] env[62522]: _type = "Task" [ 1012.336681] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.346371] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415921, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.418352] env[62522]: DEBUG oslo_concurrency.lockutils [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.451315] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1012.451315] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52025df5-53d4-3632-6a9b-69e2757103ae" [ 1012.451315] env[62522]: _type = "HttpNfcLease" [ 1012.451315] env[62522]: } is ready. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1012.451697] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1012.451697] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52025df5-53d4-3632-6a9b-69e2757103ae" [ 1012.451697] env[62522]: _type = "HttpNfcLease" [ 1012.451697] env[62522]: }. 
{{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1012.452442] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d885a1-bc86-476b-8eba-2fa4654754eb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.461796] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52978378-76f7-07fa-d339-45478cbf7ce3/disk-0.vmdk from lease info. {{(pid=62522) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1012.462093] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52978378-76f7-07fa-d339-45478cbf7ce3/disk-0.vmdk for reading. {{(pid=62522) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1012.522771] env[62522]: DEBUG oslo_vmware.api [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415915, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.279743} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.524222] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1012.524434] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1012.524603] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1012.524838] env[62522]: INFO nova.compute.manager [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 917469c5-20be-4814-814f-a042415be021] Took 2.21 seconds to destroy the instance on the hypervisor. [ 1012.525126] env[62522]: DEBUG oslo.service.loopingcall [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1012.525665] env[62522]: DEBUG nova.compute.manager [-] [instance: 917469c5-20be-4814-814f-a042415be021] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1012.525793] env[62522]: DEBUG nova.network.neutron [-] [instance: 917469c5-20be-4814-814f-a042415be021] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1012.568268] env[62522]: DEBUG nova.network.neutron [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Successfully created port: 220c0c0d-f275-4f95-b1da-4d8f576166c8 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1012.585980] env[62522]: DEBUG oslo_vmware.api [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415916, 'name': PowerOnVM_Task, 'duration_secs': 1.359163} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.588203] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1012.590921] env[62522]: DEBUG nova.compute.manager [None req-8a494988-955d-446e-ad68-2b1bf49e76ca tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1012.591824] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6979869-296f-419f-b41c-4a584eb43d9c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.637922] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ed30e837-ea44-46e0-a5f8-918a4090bd0f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.685552] env[62522]: DEBUG nova.network.neutron [-] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.705370] env[62522]: DEBUG nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1012.709119] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4bebdf04-288d-4c54-a546-ffa8cff62da8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.721315] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe4085d-8106-4e19-81d3-653fccc773b5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.767698] env[62522]: DEBUG nova.compute.manager [req-6ae8011d-9741-440d-8961-96aa5b7cd982 req-c439d7d6-0f10-45cd-90c2-8d58406b7740 service nova] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Detach interface failed, port_id=ca82312a-dff9-4d56-af90-21b3984f4146, reason: Instance 895e6716-44cf-45b2-afd8-eaba71c32460 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1012.830699] env[62522]: DEBUG nova.compute.manager [req-83a4d2fe-8b65-4a5b-a792-49b89ebc6a06 req-a95ce003-46d4-48e8-bc47-80546c7fdd61 service nova] [instance: 917469c5-20be-4814-814f-a042415be021] Received event network-vif-deleted-195b1951-c091-4db1-82d8-3c20dfcaf6d1 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1012.830939] env[62522]: INFO nova.compute.manager [req-83a4d2fe-8b65-4a5b-a792-49b89ebc6a06 req-a95ce003-46d4-48e8-bc47-80546c7fdd61 service nova] [instance: 917469c5-20be-4814-814f-a042415be021] Neutron deleted interface 195b1951-c091-4db1-82d8-3c20dfcaf6d1; detaching it from the instance and deleting it from the info cache [ 1012.831248] env[62522]: DEBUG nova.network.neutron [req-83a4d2fe-8b65-4a5b-a792-49b89ebc6a06 req-a95ce003-46d4-48e8-bc47-80546c7fdd61 service nova] [instance: 917469c5-20be-4814-814f-a042415be021] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.851432] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415921, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.11357} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.851609] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1012.851792] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f48c333-83a0-4c83-a8d3-3650d8a5edd4/1f48c333-83a0-4c83-a8d3-3650d8a5edd4.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.852110] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/1f48c333-83a0-4c83-a8d3-3650d8a5edd4/1f48c333-83a0-4c83-a8d3-3650d8a5edd4.vmdk to [datastore2] 4e27a87c-4891-4e69-a6fa-312b026bf11e/4e27a87c-4891-4e69-a6fa-312b026bf11e.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1012.852452] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-429f4a4d-f98a-4431-af4c-6180a48a763e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.869702] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1012.869702] env[62522]: value = "task-2415922" [ 1012.869702] env[62522]: _type = "Task" [ 1012.869702] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.879247] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415922, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.087370] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94885907-9c57-45b8-8b8a-03d6b123d7fa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.096398] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e444c5-1194-443f-ab42-7db63c3768d4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.133435] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf5e06f-809f-4748-8b3f-12bc5dea2a4a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.143122] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b361eeb-b7ed-46b7-b59d-5c214122f593 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.162843] env[62522]: DEBUG nova.compute.provider_tree [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1013.189757] env[62522]: INFO nova.compute.manager [-] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Took 1.26 seconds to deallocate network for instance. [ 1013.302896] env[62522]: DEBUG nova.network.neutron [-] [instance: 917469c5-20be-4814-814f-a042415be021] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.337367] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-96a1178d-6544-42b5-9ff3-affcb706c021 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.348636] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d72e5c91-5606-4377-873a-07908694df27 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.389561] env[62522]: DEBUG nova.compute.manager [req-83a4d2fe-8b65-4a5b-a792-49b89ebc6a06 req-a95ce003-46d4-48e8-bc47-80546c7fdd61 service nova] [instance: 917469c5-20be-4814-814f-a042415be021] Detach interface failed, port_id=195b1951-c091-4db1-82d8-3c20dfcaf6d1, reason: Instance 917469c5-20be-4814-814f-a042415be021 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1013.396837] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415922, 'name': CopyVirtualDisk_Task} progress is 12%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.666654] env[62522]: DEBUG nova.scheduler.client.report [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1013.698835] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.715354] env[62522]: DEBUG nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1013.742776] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1013.743060] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1013.743431] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1013.743782] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 
tempest-ListServersNegativeTestJSON-2140492499-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1013.743782] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1013.743931] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1013.744162] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1013.744330] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1013.744498] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1013.745066] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1013.745066] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1013.745747] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f183cff-cd0e-4019-9606-e4ae6cbf92e4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.757096] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b964539-2864-4d70-bb3e-12024f0d6652 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.805698] env[62522]: INFO nova.compute.manager [-] [instance: 917469c5-20be-4814-814f-a042415be021] Took 1.28 seconds to deallocate network for instance. 
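The nova.virt.hardware entries above show how the guest CPU topology is derived for the m1.nano flavor: neither the flavor nor the image expresses sockets/cores/threads preferences or limits (all 0:0:0), so the limits fall back to 65536 each, and for a single vCPU the only factorisation is sockets=1, cores=1, threads=1. The toy enumeration below reproduces that arithmetic; it is a simplified stand-in for illustration, not nova.virt.hardware itself.

from collections import namedtuple

Topology = namedtuple("Topology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate every (sockets, cores, threads) triple within the limits whose
    # product equals the vCPU count, as in the "Build topologies for N vcpu(s)" step.
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append(Topology(sockets, cores, threads))
    return topologies

print(possible_topologies(1))  # [Topology(sockets=1, cores=1, threads=1)]

With only one vCPU there is a single possible topology, which is why the log reports "Got 1 possible topologies" and sorts a one-element list of desired topologies.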
[ 1013.883969] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415922, 'name': CopyVirtualDisk_Task} progress is 32%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.141673] env[62522]: DEBUG nova.network.neutron [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Successfully updated port: 220c0c0d-f275-4f95-b1da-4d8f576166c8 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1014.172694] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.483s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.173927] env[62522]: DEBUG nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1014.176162] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.952s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.178150] env[62522]: INFO nova.compute.claims [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1014.314574] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.320153] env[62522]: DEBUG nova.compute.manager [req-37502312-4f76-4980-bb5c-95d63a2032ec req-a8d0cacc-7167-44eb-9a70-f1f01405a9c9 service nova] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Received event network-vif-plugged-220c0c0d-f275-4f95-b1da-4d8f576166c8 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1014.320936] env[62522]: DEBUG oslo_concurrency.lockutils [req-37502312-4f76-4980-bb5c-95d63a2032ec req-a8d0cacc-7167-44eb-9a70-f1f01405a9c9 service nova] Acquiring lock "921c14c9-27fa-4eda-9831-6263ad0d6c57-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.321343] env[62522]: DEBUG 
oslo_concurrency.lockutils [req-37502312-4f76-4980-bb5c-95d63a2032ec req-a8d0cacc-7167-44eb-9a70-f1f01405a9c9 service nova] Lock "921c14c9-27fa-4eda-9831-6263ad0d6c57-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.321451] env[62522]: DEBUG oslo_concurrency.lockutils [req-37502312-4f76-4980-bb5c-95d63a2032ec req-a8d0cacc-7167-44eb-9a70-f1f01405a9c9 service nova] Lock "921c14c9-27fa-4eda-9831-6263ad0d6c57-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.321640] env[62522]: DEBUG nova.compute.manager [req-37502312-4f76-4980-bb5c-95d63a2032ec req-a8d0cacc-7167-44eb-9a70-f1f01405a9c9 service nova] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] No waiting events found dispatching network-vif-plugged-220c0c0d-f275-4f95-b1da-4d8f576166c8 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1014.321892] env[62522]: WARNING nova.compute.manager [req-37502312-4f76-4980-bb5c-95d63a2032ec req-a8d0cacc-7167-44eb-9a70-f1f01405a9c9 service nova] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Received unexpected event network-vif-plugged-220c0c0d-f275-4f95-b1da-4d8f576166c8 for instance with vm_state building and task_state spawning. [ 1014.322256] env[62522]: DEBUG nova.compute.manager [req-37502312-4f76-4980-bb5c-95d63a2032ec req-a8d0cacc-7167-44eb-9a70-f1f01405a9c9 service nova] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Received event network-changed-220c0c0d-f275-4f95-b1da-4d8f576166c8 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1014.322635] env[62522]: DEBUG nova.compute.manager [req-37502312-4f76-4980-bb5c-95d63a2032ec req-a8d0cacc-7167-44eb-9a70-f1f01405a9c9 service nova] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Refreshing instance network info cache due to event network-changed-220c0c0d-f275-4f95-b1da-4d8f576166c8. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1014.322845] env[62522]: DEBUG oslo_concurrency.lockutils [req-37502312-4f76-4980-bb5c-95d63a2032ec req-a8d0cacc-7167-44eb-9a70-f1f01405a9c9 service nova] Acquiring lock "refresh_cache-921c14c9-27fa-4eda-9831-6263ad0d6c57" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1014.322987] env[62522]: DEBUG oslo_concurrency.lockutils [req-37502312-4f76-4980-bb5c-95d63a2032ec req-a8d0cacc-7167-44eb-9a70-f1f01405a9c9 service nova] Acquired lock "refresh_cache-921c14c9-27fa-4eda-9831-6263ad0d6c57" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.323175] env[62522]: DEBUG nova.network.neutron [req-37502312-4f76-4980-bb5c-95d63a2032ec req-a8d0cacc-7167-44eb-9a70-f1f01405a9c9 service nova] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Refreshing network info cache for port 220c0c0d-f275-4f95-b1da-4d8f576166c8 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1014.386035] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415922, 'name': CopyVirtualDisk_Task} progress is 52%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.647344] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquiring lock "refresh_cache-921c14c9-27fa-4eda-9831-6263ad0d6c57" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1014.684007] env[62522]: DEBUG nova.compute.utils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1014.687936] env[62522]: DEBUG nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1014.688221] env[62522]: DEBUG nova.network.neutron [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1014.745516] env[62522]: DEBUG nova.policy [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a3d62ca8c3544b2ba6471af6a69a3d87', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'faee94ca03fc43149278a579ebe65682', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1014.763151] env[62522]: DEBUG oslo_concurrency.lockutils [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquiring lock "5c9b1120-84ad-48d5-8cd4-0cf387963066" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.763674] env[62522]: DEBUG oslo_concurrency.lockutils [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Lock "5c9b1120-84ad-48d5-8cd4-0cf387963066" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.869854] env[62522]: DEBUG nova.network.neutron [req-37502312-4f76-4980-bb5c-95d63a2032ec req-a8d0cacc-7167-44eb-9a70-f1f01405a9c9 service nova] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1014.883551] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415922, 'name': CopyVirtualDisk_Task} progress is 71%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.959570] env[62522]: DEBUG nova.network.neutron [req-37502312-4f76-4980-bb5c-95d63a2032ec req-a8d0cacc-7167-44eb-9a70-f1f01405a9c9 service nova] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.025510] env[62522]: DEBUG nova.network.neutron [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Successfully created port: 18f42e25-7b00-475d-8f2f-b150679eeeef {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1015.189911] env[62522]: DEBUG nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1015.267244] env[62522]: DEBUG nova.compute.manager [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1015.387736] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415922, 'name': CopyVirtualDisk_Task} progress is 91%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.461930] env[62522]: DEBUG oslo_concurrency.lockutils [req-37502312-4f76-4980-bb5c-95d63a2032ec req-a8d0cacc-7167-44eb-9a70-f1f01405a9c9 service nova] Releasing lock "refresh_cache-921c14c9-27fa-4eda-9831-6263ad0d6c57" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1015.463524] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquired lock "refresh_cache-921c14c9-27fa-4eda-9831-6263ad0d6c57" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.463663] env[62522]: DEBUG nova.network.neutron [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1015.588651] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cf32d5b-7d29-4f1a-bd28-de522739caad {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.597521] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06e1be39-ec7f-4ef1-b035-6b829ebbc358 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.629021] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4e976bc-b245-4774-b252-3c78ae45b362 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Volume attach. 
Driver type: vmdk {{(pid=62522) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1015.629293] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4e976bc-b245-4774-b252-3c78ae45b362 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489789', 'volume_id': '56b832bf-8626-456e-9706-070e3adf329d', 'name': 'volume-56b832bf-8626-456e-9706-070e3adf329d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'bf44e269-0297-473e-b6ce-04a40d0ec1b4', 'attached_at': '', 'detached_at': '', 'volume_id': '56b832bf-8626-456e-9706-070e3adf329d', 'serial': '56b832bf-8626-456e-9706-070e3adf329d'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1015.630090] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a6186ac-23cc-426e-be74-24a4b8a251aa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.633120] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c01b6db-8f27-4388-a3e6-5e7820d3e677 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.651548] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c23923-0f31-471a-8c5a-ae3e021b097d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.656223] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ad0fa7-1961-498d-ae0c-140198a25753 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.669560] env[62522]: DEBUG nova.compute.provider_tree [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1015.691731] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4e976bc-b245-4774-b252-3c78ae45b362 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] volume-56b832bf-8626-456e-9706-070e3adf329d/volume-56b832bf-8626-456e-9706-070e3adf329d.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1015.693395] env[62522]: DEBUG nova.scheduler.client.report [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1015.696380] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f7daf9b-d2f4-45c9-866b-f99974d82c79 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.722459] env[62522]: DEBUG oslo_vmware.api [None req-a4e976bc-b245-4774-b252-3c78ae45b362 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 1015.722459] env[62522]: value = "task-2415924" [ 1015.722459] env[62522]: _type = "Task" [ 1015.722459] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.737653] env[62522]: DEBUG oslo_vmware.api [None req-a4e976bc-b245-4774-b252-3c78ae45b362 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415924, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.788884] env[62522]: DEBUG oslo_concurrency.lockutils [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.887227] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415922, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.7539} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.887481] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/1f48c333-83a0-4c83-a8d3-3650d8a5edd4/1f48c333-83a0-4c83-a8d3-3650d8a5edd4.vmdk to [datastore2] 4e27a87c-4891-4e69-a6fa-312b026bf11e/4e27a87c-4891-4e69-a6fa-312b026bf11e.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1015.888282] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9868c89-0335-461a-9f38-ad8c320ff5fe {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.913750] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] 4e27a87c-4891-4e69-a6fa-312b026bf11e/4e27a87c-4891-4e69-a6fa-312b026bf11e.vmdk or device None with type streamOptimized {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1015.914026] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce13f80b-fcd9-4123-af04-b1fa1c520e17 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.935262] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1015.935262] env[62522]: value = "task-2415925" [ 1015.935262] env[62522]: _type = "Task" [ 1015.935262] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.944482] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415925, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.997173] env[62522]: DEBUG nova.network.neutron [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1016.160204] env[62522]: DEBUG nova.network.neutron [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Updating instance_info_cache with network_info: [{"id": "220c0c0d-f275-4f95-b1da-4d8f576166c8", "address": "fa:16:3e:e4:f6:f6", "network": {"id": "302faf5b-b1e0-4048-920e-716cd79a6f7f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1208003028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "faee94ca03fc43149278a579ebe65682", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap220c0c0d-f2", "ovs_interfaceid": "220c0c0d-f275-4f95-b1da-4d8f576166c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.213581] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.037s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.214127] env[62522]: DEBUG nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1016.217317] env[62522]: DEBUG nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1016.219752] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.895s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.221173] env[62522]: INFO nova.compute.claims [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1016.234982] env[62522]: DEBUG oslo_vmware.api [None req-a4e976bc-b245-4774-b252-3c78ae45b362 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415924, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.247378] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1016.247636] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1016.247792] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1016.247972] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1016.248130] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1016.248281] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1016.248487] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1016.248644] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1016.248810] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1016.248969] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1016.249205] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1016.250091] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-092c3941-c26e-49e6-91d8-065270ed36ac {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.259527] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af10cbf-754f-47fd-892d-6db1773dd627 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.447859] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415925, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.481097] env[62522]: DEBUG nova.compute.manager [req-6866cf05-1285-4c73-8222-9bedc7d96e0b req-a835e03d-3013-47af-b481-bf967073a033 service nova] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Received event network-vif-plugged-18f42e25-7b00-475d-8f2f-b150679eeeef {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1016.481390] env[62522]: DEBUG oslo_concurrency.lockutils [req-6866cf05-1285-4c73-8222-9bedc7d96e0b req-a835e03d-3013-47af-b481-bf967073a033 service nova] Acquiring lock "4e9436df-c86b-429b-abc2-97f760858055-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.481632] env[62522]: DEBUG oslo_concurrency.lockutils [req-6866cf05-1285-4c73-8222-9bedc7d96e0b req-a835e03d-3013-47af-b481-bf967073a033 service nova] Lock "4e9436df-c86b-429b-abc2-97f760858055-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.481799] env[62522]: DEBUG oslo_concurrency.lockutils [req-6866cf05-1285-4c73-8222-9bedc7d96e0b req-a835e03d-3013-47af-b481-bf967073a033 service nova] Lock "4e9436df-c86b-429b-abc2-97f760858055-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.481971] env[62522]: DEBUG nova.compute.manager [req-6866cf05-1285-4c73-8222-9bedc7d96e0b req-a835e03d-3013-47af-b481-bf967073a033 service nova] [instance: 4e9436df-c86b-429b-abc2-97f760858055] No waiting events found dispatching network-vif-plugged-18f42e25-7b00-475d-8f2f-b150679eeeef {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1016.482360] env[62522]: WARNING nova.compute.manager [req-6866cf05-1285-4c73-8222-9bedc7d96e0b req-a835e03d-3013-47af-b481-bf967073a033 service nova] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Received unexpected event network-vif-plugged-18f42e25-7b00-475d-8f2f-b150679eeeef for instance with vm_state building and task_state spawning. 
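The "No waiting events found" and "Received unexpected event" records above reflect the external-event handshake between nova-compute and Neutron: a waiter is registered for an expected event (e.g. network-vif-plugged) before the operation starts, the event handler pops and signals that waiter when the notification arrives, and a notification with no registered waiter is logged as unexpected. The sketch below illustrates that pattern only; the class and method names are invented for the example and are not Nova's real `InstanceEvents` API.

```python
# Minimal sketch of the expected-event bookkeeping behind the log lines above
# (illustrative only; not the real nova.compute.manager.InstanceEvents class).
import threading

class InstanceEventsSketch:
    def __init__(self):
        self._lock = threading.Lock()
        self._events = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        """Register interest in an event before triggering the operation."""
        waiter = threading.Event()
        with self._lock:
            self._events[(instance_uuid, event_name)] = waiter
        return waiter

    def pop_event(self, instance_uuid, event_name):
        """Called when the external event arrives; returns the waiter or None."""
        with self._lock:
            return self._events.pop((instance_uuid, event_name), None)

events = InstanceEventsSketch()
waiter = events.pop_event("4e9436df-c86b-429b-abc2-97f760858055",
                          "network-vif-plugged-18f42e25-7b00-475d-8f2f-b150679eeeef")
if waiter is None:
    # No waiter was registered while the instance is still building/spawning,
    # which corresponds to the WARNING "Received unexpected event" above.
    print("Received unexpected event (no waiter registered)")
```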
[ 1016.587596] env[62522]: DEBUG nova.network.neutron [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Successfully updated port: 18f42e25-7b00-475d-8f2f-b150679eeeef {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1016.662854] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Releasing lock "refresh_cache-921c14c9-27fa-4eda-9831-6263ad0d6c57" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1016.663161] env[62522]: DEBUG nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Instance network_info: |[{"id": "220c0c0d-f275-4f95-b1da-4d8f576166c8", "address": "fa:16:3e:e4:f6:f6", "network": {"id": "302faf5b-b1e0-4048-920e-716cd79a6f7f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1208003028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "faee94ca03fc43149278a579ebe65682", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap220c0c0d-f2", "ovs_interfaceid": "220c0c0d-f275-4f95-b1da-4d8f576166c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1016.663604] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:f6:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73915082-a1b0-460b-b24d-97588fc9cb29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '220c0c0d-f275-4f95-b1da-4d8f576166c8', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1016.670942] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Creating folder: Project (faee94ca03fc43149278a579ebe65682). Parent ref: group-v489562. 
{{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1016.671689] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8f8c878e-7f8b-45d7-8e19-b7e9a93e3fe4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.686823] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Created folder: Project (faee94ca03fc43149278a579ebe65682) in parent group-v489562. [ 1016.687129] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Creating folder: Instances. Parent ref: group-v489790. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1016.687400] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-85cb4c78-d55a-4d13-b99a-de2e1dbfb64d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.700240] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Created folder: Instances in parent group-v489790. [ 1016.700780] env[62522]: DEBUG oslo.service.loopingcall [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1016.700944] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1016.701305] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bdf00952-13c9-4a90-8c7c-01deffd26918 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.723262] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1016.723262] env[62522]: value = "task-2415928" [ 1016.723262] env[62522]: _type = "Task" [ 1016.723262] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.727837] env[62522]: DEBUG nova.compute.utils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1016.732092] env[62522]: DEBUG nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1016.732302] env[62522]: DEBUG nova.network.neutron [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1016.744027] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415928, 'name': CreateVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.748162] env[62522]: DEBUG oslo_vmware.api [None req-a4e976bc-b245-4774-b252-3c78ae45b362 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415924, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.787255] env[62522]: DEBUG nova.policy [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a3d62ca8c3544b2ba6471af6a69a3d87', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'faee94ca03fc43149278a579ebe65682', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1016.946664] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415925, 'name': ReconfigVM_Task, 'duration_secs': 0.803755} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.947038] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Reconfigured VM instance instance-00000052 to attach disk [datastore2] 4e27a87c-4891-4e69-a6fa-312b026bf11e/4e27a87c-4891-4e69-a6fa-312b026bf11e.vmdk or device None with type streamOptimized {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1016.947833] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-17984ca0-f247-46db-a92a-de14e9b0fb07 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.955913] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1016.955913] env[62522]: value = "task-2415929" [ 1016.955913] env[62522]: _type = "Task" [ 1016.955913] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.965572] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415929, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.048344] env[62522]: DEBUG nova.network.neutron [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Successfully created port: b498b6ac-e4c6-46e5-89f4-804206a74bcf {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1017.093560] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquiring lock "refresh_cache-4e9436df-c86b-429b-abc2-97f760858055" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1017.093710] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquired lock "refresh_cache-4e9436df-c86b-429b-abc2-97f760858055" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.093898] env[62522]: DEBUG nova.network.neutron [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1017.237016] env[62522]: DEBUG nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1017.239919] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415928, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.248533] env[62522]: DEBUG oslo_vmware.api [None req-a4e976bc-b245-4774-b252-3c78ae45b362 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415924, 'name': ReconfigVM_Task, 'duration_secs': 1.089092} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.248814] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4e976bc-b245-4774-b252-3c78ae45b362 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Reconfigured VM instance instance-00000033 to attach disk [datastore1] volume-56b832bf-8626-456e-9706-070e3adf329d/volume-56b832bf-8626-456e-9706-070e3adf329d.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1017.253448] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ee08f7c-9670-44c6-b5a3-99d5f36f5a3d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.269826] env[62522]: DEBUG oslo_vmware.api [None req-a4e976bc-b245-4774-b252-3c78ae45b362 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 1017.269826] env[62522]: value = "task-2415930" [ 1017.269826] env[62522]: _type = "Task" [ 1017.269826] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.281978] env[62522]: DEBUG oslo_vmware.api [None req-a4e976bc-b245-4774-b252-3c78ae45b362 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415930, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.469606] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415929, 'name': Rename_Task, 'duration_secs': 0.265085} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.469890] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1017.470184] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-43b7ae00-e0a0-497d-a6a4-b974d7b2a02f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.479549] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1017.479549] env[62522]: value = "task-2415931" [ 1017.479549] env[62522]: _type = "Task" [ 1017.479549] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.491692] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415931, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.629890] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a8e482-b1e1-4eb1-ae33-7b2c519c3bf6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.644800] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf1600cd-f7fe-4d0a-8f1a-ddc6080ba9da {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.648653] env[62522]: DEBUG nova.network.neutron [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1017.680696] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac7abbf-6dd6-4783-b63a-329c697d46a5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.689641] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2004f72e-0a06-4c1e-9116-2eb1b3a5b073 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.704529] env[62522]: DEBUG nova.compute.provider_tree [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1017.736014] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415928, 'name': CreateVM_Task, 'duration_secs': 0.512152} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.736347] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1017.736991] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1017.737184] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.737511] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1017.737769] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f22b716b-b0d6-4012-9237-273a074f2a96 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.746540] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1017.746540] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a85724-6f29-4f81-fdd7-45c2a828f6ba" [ 1017.746540] env[62522]: _type = "Task" [ 1017.746540] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.756018] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a85724-6f29-4f81-fdd7-45c2a828f6ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.783756] env[62522]: DEBUG oslo_vmware.api [None req-a4e976bc-b245-4774-b252-3c78ae45b362 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415930, 'name': ReconfigVM_Task, 'duration_secs': 0.186065} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.786650] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4e976bc-b245-4774-b252-3c78ae45b362 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489789', 'volume_id': '56b832bf-8626-456e-9706-070e3adf329d', 'name': 'volume-56b832bf-8626-456e-9706-070e3adf329d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'bf44e269-0297-473e-b6ce-04a40d0ec1b4', 'attached_at': '', 'detached_at': '', 'volume_id': '56b832bf-8626-456e-9706-070e3adf329d', 'serial': '56b832bf-8626-456e-9706-070e3adf329d'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1017.858274] env[62522]: DEBUG nova.network.neutron [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Updating instance_info_cache with network_info: [{"id": "18f42e25-7b00-475d-8f2f-b150679eeeef", "address": "fa:16:3e:5a:76:01", "network": {"id": "302faf5b-b1e0-4048-920e-716cd79a6f7f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1208003028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "faee94ca03fc43149278a579ebe65682", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18f42e25-7b", "ovs_interfaceid": "18f42e25-7b00-475d-8f2f-b150679eeeef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.989906] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415931, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.207607] env[62522]: DEBUG nova.scheduler.client.report [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1018.248097] env[62522]: DEBUG nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1018.262808] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a85724-6f29-4f81-fdd7-45c2a828f6ba, 'name': SearchDatastore_Task, 'duration_secs': 0.030079} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.263239] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1018.263587] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1018.263901] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1018.264095] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.264331] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1018.264665] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ebb57298-9102-43fa-894d-fbbc42735873 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.275767] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1018.276087] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1018.279238] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1018.279461] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1018.279772] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1018.280120] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1018.280347] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Image pref 0:0:0 {{(pid=62522) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1018.280515] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1018.280748] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1018.280916] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1018.281426] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1018.281426] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1018.281658] env[62522]: DEBUG nova.virt.hardware [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1018.282042] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-529d7fbb-0e90-4679-befc-864b90a77496 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.285747] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd60866a-b359-4234-a82e-454f14d4ee8e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.293715] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1018.293715] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52708a66-f246-a38b-d3d4-a60060b490f6" [ 1018.293715] env[62522]: _type = "Task" [ 1018.293715] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.300528] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ea71a6-90ce-49bd-a862-54f9fc4cbae9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.310750] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52708a66-f246-a38b-d3d4-a60060b490f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.361169] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Releasing lock "refresh_cache-4e9436df-c86b-429b-abc2-97f760858055" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1018.361575] env[62522]: DEBUG nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Instance network_info: |[{"id": "18f42e25-7b00-475d-8f2f-b150679eeeef", "address": "fa:16:3e:5a:76:01", "network": {"id": "302faf5b-b1e0-4048-920e-716cd79a6f7f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1208003028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "faee94ca03fc43149278a579ebe65682", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18f42e25-7b", "ovs_interfaceid": "18f42e25-7b00-475d-8f2f-b150679eeeef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1018.362009] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:76:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73915082-a1b0-460b-b24d-97588fc9cb29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '18f42e25-7b00-475d-8f2f-b150679eeeef', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1018.369986] env[62522]: DEBUG oslo.service.loopingcall [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba 
tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1018.370312] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1018.370560] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-abdebced-c2e7-4445-99d0-79fb98e8c192 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.393064] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1018.393064] env[62522]: value = "task-2415932" [ 1018.393064] env[62522]: _type = "Task" [ 1018.393064] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.405078] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415932, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.492818] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415931, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.515579] env[62522]: DEBUG nova.compute.manager [req-f2bdf6da-bd03-44df-83e0-9a2c9d3e9d91 req-6d79cf3a-c030-4ec0-8e4e-4678e738ee48 service nova] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Received event network-changed-18f42e25-7b00-475d-8f2f-b150679eeeef {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1018.515725] env[62522]: DEBUG nova.compute.manager [req-f2bdf6da-bd03-44df-83e0-9a2c9d3e9d91 req-6d79cf3a-c030-4ec0-8e4e-4678e738ee48 service nova] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Refreshing instance network info cache due to event network-changed-18f42e25-7b00-475d-8f2f-b150679eeeef. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1018.515956] env[62522]: DEBUG oslo_concurrency.lockutils [req-f2bdf6da-bd03-44df-83e0-9a2c9d3e9d91 req-6d79cf3a-c030-4ec0-8e4e-4678e738ee48 service nova] Acquiring lock "refresh_cache-4e9436df-c86b-429b-abc2-97f760858055" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1018.516183] env[62522]: DEBUG oslo_concurrency.lockutils [req-f2bdf6da-bd03-44df-83e0-9a2c9d3e9d91 req-6d79cf3a-c030-4ec0-8e4e-4678e738ee48 service nova] Acquired lock "refresh_cache-4e9436df-c86b-429b-abc2-97f760858055" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.516364] env[62522]: DEBUG nova.network.neutron [req-f2bdf6da-bd03-44df-83e0-9a2c9d3e9d91 req-6d79cf3a-c030-4ec0-8e4e-4678e738ee48 service nova] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Refreshing network info cache for port 18f42e25-7b00-475d-8f2f-b150679eeeef {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1018.606366] env[62522]: DEBUG nova.network.neutron [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Successfully updated port: b498b6ac-e4c6-46e5-89f4-804206a74bcf {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1018.716619] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.494s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.716619] env[62522]: DEBUG nova.compute.manager [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1018.717458] env[62522]: DEBUG oslo_concurrency.lockutils [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.299s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.718368] env[62522]: INFO nova.compute.claims [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1018.807789] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52708a66-f246-a38b-d3d4-a60060b490f6, 'name': SearchDatastore_Task, 'duration_secs': 0.029173} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.808620] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0d94f02-c9dd-475d-8ff1-1026f95956c4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.815874] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1018.815874] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a3e4ca-966f-a7c7-442b-52d4786a0bb9" [ 1018.815874] env[62522]: _type = "Task" [ 1018.815874] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.828881] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a3e4ca-966f-a7c7-442b-52d4786a0bb9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.831124] env[62522]: DEBUG nova.objects.instance [None req-a4e976bc-b245-4774-b252-3c78ae45b362 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lazy-loading 'flavor' on Instance uuid bf44e269-0297-473e-b6ce-04a40d0ec1b4 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1018.904999] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415932, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.992291] env[62522]: DEBUG oslo_vmware.api [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415931, 'name': PowerOnVM_Task, 'duration_secs': 1.279444} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.993864] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1018.994088] env[62522]: INFO nova.compute.manager [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Took 16.82 seconds to spawn the instance on the hypervisor. 
[ 1018.994271] env[62522]: DEBUG nova.compute.manager [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1018.997057] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493e8746-af9c-450a-97ce-3ceaa7dd6e4f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.109412] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquiring lock "refresh_cache-97f4c6ab-04de-4069-8ce0-1509c30ffb0f" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1019.109543] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquired lock "refresh_cache-97f4c6ab-04de-4069-8ce0-1509c30ffb0f" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.109662] env[62522]: DEBUG nova.network.neutron [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1019.222860] env[62522]: DEBUG nova.compute.utils [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1019.227854] env[62522]: DEBUG nova.compute.manager [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1019.227854] env[62522]: DEBUG nova.network.neutron [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1019.235944] env[62522]: DEBUG nova.network.neutron [req-f2bdf6da-bd03-44df-83e0-9a2c9d3e9d91 req-6d79cf3a-c030-4ec0-8e4e-4678e738ee48 service nova] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Updated VIF entry in instance network info cache for port 18f42e25-7b00-475d-8f2f-b150679eeeef. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1019.236312] env[62522]: DEBUG nova.network.neutron [req-f2bdf6da-bd03-44df-83e0-9a2c9d3e9d91 req-6d79cf3a-c030-4ec0-8e4e-4678e738ee48 service nova] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Updating instance_info_cache with network_info: [{"id": "18f42e25-7b00-475d-8f2f-b150679eeeef", "address": "fa:16:3e:5a:76:01", "network": {"id": "302faf5b-b1e0-4048-920e-716cd79a6f7f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1208003028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "faee94ca03fc43149278a579ebe65682", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18f42e25-7b", "ovs_interfaceid": "18f42e25-7b00-475d-8f2f-b150679eeeef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.269389] env[62522]: DEBUG nova.policy [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0b81d399f06a47bc819693b52bb74004', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ff5da278d2be4ca983424c8291beadec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1019.328137] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a3e4ca-966f-a7c7-442b-52d4786a0bb9, 'name': SearchDatastore_Task, 'duration_secs': 0.015156} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.328435] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1019.328698] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 921c14c9-27fa-4eda-9831-6263ad0d6c57/921c14c9-27fa-4eda-9831-6263ad0d6c57.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1019.328971] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c241f9b0-65b1-479c-b0ea-e85540ca616b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.338066] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1019.338066] env[62522]: value = "task-2415933" [ 1019.338066] env[62522]: _type = "Task" [ 1019.338066] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.338560] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a4e976bc-b245-4774-b252-3c78ae45b362 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "bf44e269-0297-473e-b6ce-04a40d0ec1b4" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.359s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.349130] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415933, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.404919] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415932, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.500767] env[62522]: INFO nova.compute.manager [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Rescuing [ 1019.501084] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "refresh_cache-bf44e269-0297-473e-b6ce-04a40d0ec1b4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1019.501252] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquired lock "refresh_cache-bf44e269-0297-473e-b6ce-04a40d0ec1b4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.501428] env[62522]: DEBUG nova.network.neutron [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1019.520028] env[62522]: INFO nova.compute.manager [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Took 28.45 seconds to build instance. [ 1019.601163] env[62522]: DEBUG nova.network.neutron [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Successfully created port: 9e10cc19-76da-49d9-80b6-068ce128a1b0 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1019.645037] env[62522]: DEBUG nova.network.neutron [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1019.732778] env[62522]: DEBUG nova.compute.manager [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1019.738688] env[62522]: DEBUG oslo_concurrency.lockutils [req-f2bdf6da-bd03-44df-83e0-9a2c9d3e9d91 req-6d79cf3a-c030-4ec0-8e4e-4678e738ee48 service nova] Releasing lock "refresh_cache-4e9436df-c86b-429b-abc2-97f760858055" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1019.738948] env[62522]: DEBUG nova.compute.manager [req-f2bdf6da-bd03-44df-83e0-9a2c9d3e9d91 req-6d79cf3a-c030-4ec0-8e4e-4678e738ee48 service nova] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Received event network-vif-plugged-b498b6ac-e4c6-46e5-89f4-804206a74bcf {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1019.739164] env[62522]: DEBUG oslo_concurrency.lockutils [req-f2bdf6da-bd03-44df-83e0-9a2c9d3e9d91 req-6d79cf3a-c030-4ec0-8e4e-4678e738ee48 service nova] Acquiring lock "97f4c6ab-04de-4069-8ce0-1509c30ffb0f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.739367] env[62522]: DEBUG oslo_concurrency.lockutils [req-f2bdf6da-bd03-44df-83e0-9a2c9d3e9d91 req-6d79cf3a-c030-4ec0-8e4e-4678e738ee48 service nova] Lock "97f4c6ab-04de-4069-8ce0-1509c30ffb0f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.739531] env[62522]: DEBUG oslo_concurrency.lockutils [req-f2bdf6da-bd03-44df-83e0-9a2c9d3e9d91 req-6d79cf3a-c030-4ec0-8e4e-4678e738ee48 service nova] Lock "97f4c6ab-04de-4069-8ce0-1509c30ffb0f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.739704] env[62522]: DEBUG nova.compute.manager [req-f2bdf6da-bd03-44df-83e0-9a2c9d3e9d91 req-6d79cf3a-c030-4ec0-8e4e-4678e738ee48 service nova] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] No waiting events found dispatching network-vif-plugged-b498b6ac-e4c6-46e5-89f4-804206a74bcf {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1019.739868] env[62522]: WARNING nova.compute.manager [req-f2bdf6da-bd03-44df-83e0-9a2c9d3e9d91 req-6d79cf3a-c030-4ec0-8e4e-4678e738ee48 service nova] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Received unexpected event network-vif-plugged-b498b6ac-e4c6-46e5-89f4-804206a74bcf for instance with vm_state building and task_state spawning. 
[ 1019.819169] env[62522]: DEBUG nova.network.neutron [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Updating instance_info_cache with network_info: [{"id": "b498b6ac-e4c6-46e5-89f4-804206a74bcf", "address": "fa:16:3e:c8:3b:f8", "network": {"id": "302faf5b-b1e0-4048-920e-716cd79a6f7f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1208003028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "faee94ca03fc43149278a579ebe65682", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb498b6ac-e4", "ovs_interfaceid": "b498b6ac-e4c6-46e5-89f4-804206a74bcf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.855635] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415933, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.909209] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415932, 'name': CreateVM_Task, 'duration_secs': 1.418535} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.909476] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1019.910327] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1019.910598] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.910996] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1019.911334] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bd08efb-a68f-4f12-8ab3-311e6b37b2d4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.918995] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1019.918995] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52228e3a-9b27-7bb0-772c-a6c3082f3deb" [ 1019.918995] env[62522]: _type = "Task" [ 1019.918995] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.930258] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52228e3a-9b27-7bb0-772c-a6c3082f3deb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.024991] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5faeeff4-27b5-4c92-848f-ca1001338573 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "4e27a87c-4891-4e69-a6fa-312b026bf11e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.960s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.200737] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b07e3f-e0e2-4278-ace6-0cf69cc44f82 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.212899] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a42efd9d-7e99-4f3a-b1e9-9082cfad3cba {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.254280] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10cb5943-14a7-4003-aad1-4ae40212f15a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.263429] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da91a81f-34f9-4e19-a808-40c359200701 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.279300] env[62522]: DEBUG nova.compute.provider_tree [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1020.297893] env[62522]: DEBUG nova.network.neutron [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Updating instance_info_cache with network_info: [{"id": "36fe2fd3-3447-4032-8c02-5be9712b769d", "address": "fa:16:3e:2e:5d:25", "network": {"id": "5f1d73d1-ff9e-4081-87cf-8df6294f67c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-892212702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "962664c996f24cf9ae192f79fae18ca4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "419a5b3f-4c6f-4168-9def-746b4d8c5c24", "external-id": "nsx-vlan-transportzone-656", "segmentation_id": 656, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36fe2fd3-34", "ovs_interfaceid": "36fe2fd3-3447-4032-8c02-5be9712b769d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.328956] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Releasing lock "refresh_cache-97f4c6ab-04de-4069-8ce0-1509c30ffb0f" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.328956] env[62522]: DEBUG nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Instance network_info: |[{"id": "b498b6ac-e4c6-46e5-89f4-804206a74bcf", "address": "fa:16:3e:c8:3b:f8", "network": {"id": "302faf5b-b1e0-4048-920e-716cd79a6f7f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1208003028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "faee94ca03fc43149278a579ebe65682", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb498b6ac-e4", "ovs_interfaceid": "b498b6ac-e4c6-46e5-89f4-804206a74bcf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1020.329331] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:3b:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73915082-a1b0-460b-b24d-97588fc9cb29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b498b6ac-e4c6-46e5-89f4-804206a74bcf', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1020.336810] env[62522]: DEBUG oslo.service.loopingcall [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1020.337044] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1020.337269] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b75a6eba-aaa4-42f5-a35f-092da19aef73 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.361594] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415933, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.833401} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.363076] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 921c14c9-27fa-4eda-9831-6263ad0d6c57/921c14c9-27fa-4eda-9831-6263ad0d6c57.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1020.363369] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1020.363618] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1020.363618] env[62522]: value = "task-2415934" [ 1020.363618] env[62522]: _type = "Task" [ 1020.363618] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.363797] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bdb292b7-cef1-409a-bcad-3c9299c61051 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.375056] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415934, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.376640] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1020.376640] env[62522]: value = "task-2415935" [ 1020.376640] env[62522]: _type = "Task" [ 1020.376640] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.387130] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415935, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.431347] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52228e3a-9b27-7bb0-772c-a6c3082f3deb, 'name': SearchDatastore_Task, 'duration_secs': 0.06481} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.431675] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.431997] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1020.432280] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1020.432517] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.432681] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1020.433023] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4557672b-ba39-4441-a7b4-f9c4f1e2903f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.445399] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1020.445696] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1020.446579] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4289189-3322-4b45-b3ae-d507e8be9035 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.453479] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1020.453479] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520af03a-92b3-faba-f425-ff80ec749fa8" [ 1020.453479] env[62522]: _type = "Task" [ 1020.453479] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.465568] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520af03a-92b3-faba-f425-ff80ec749fa8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.545606] env[62522]: DEBUG nova.compute.manager [req-f1f916e4-3aa2-4ffe-b8cf-f6ed784c2373 req-6270103e-07b2-4308-bb46-735f208f5e47 service nova] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Received event network-changed-b498b6ac-e4c6-46e5-89f4-804206a74bcf {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1020.545837] env[62522]: DEBUG nova.compute.manager [req-f1f916e4-3aa2-4ffe-b8cf-f6ed784c2373 req-6270103e-07b2-4308-bb46-735f208f5e47 service nova] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Refreshing instance network info cache due to event network-changed-b498b6ac-e4c6-46e5-89f4-804206a74bcf. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1020.546313] env[62522]: DEBUG oslo_concurrency.lockutils [req-f1f916e4-3aa2-4ffe-b8cf-f6ed784c2373 req-6270103e-07b2-4308-bb46-735f208f5e47 service nova] Acquiring lock "refresh_cache-97f4c6ab-04de-4069-8ce0-1509c30ffb0f" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1020.546531] env[62522]: DEBUG oslo_concurrency.lockutils [req-f1f916e4-3aa2-4ffe-b8cf-f6ed784c2373 req-6270103e-07b2-4308-bb46-735f208f5e47 service nova] Acquired lock "refresh_cache-97f4c6ab-04de-4069-8ce0-1509c30ffb0f" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.546848] env[62522]: DEBUG nova.network.neutron [req-f1f916e4-3aa2-4ffe-b8cf-f6ed784c2373 req-6270103e-07b2-4308-bb46-735f208f5e47 service nova] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Refreshing network info cache for port b498b6ac-e4c6-46e5-89f4-804206a74bcf {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1020.682988] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "4e27a87c-4891-4e69-a6fa-312b026bf11e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1020.683260] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "4e27a87c-4891-4e69-a6fa-312b026bf11e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.683479] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "4e27a87c-4891-4e69-a6fa-312b026bf11e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1020.683741] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "4e27a87c-4891-4e69-a6fa-312b026bf11e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.683997] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "4e27a87c-4891-4e69-a6fa-312b026bf11e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.686345] env[62522]: INFO nova.compute.manager [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 
4e27a87c-4891-4e69-a6fa-312b026bf11e] Terminating instance [ 1020.758429] env[62522]: DEBUG nova.compute.manager [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1020.783409] env[62522]: DEBUG nova.scheduler.client.report [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1020.788965] env[62522]: DEBUG nova.virt.hardware [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1020.789327] env[62522]: DEBUG nova.virt.hardware [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1020.789574] env[62522]: DEBUG nova.virt.hardware [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1020.789858] env[62522]: DEBUG nova.virt.hardware [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1020.790109] env[62522]: DEBUG nova.virt.hardware [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1020.790355] env[62522]: DEBUG nova.virt.hardware [None 
req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1020.790682] env[62522]: DEBUG nova.virt.hardware [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1020.790930] env[62522]: DEBUG nova.virt.hardware [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1020.791235] env[62522]: DEBUG nova.virt.hardware [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1020.791699] env[62522]: DEBUG nova.virt.hardware [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1020.791817] env[62522]: DEBUG nova.virt.hardware [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1020.792883] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b916465d-a06e-4c5f-81b1-696b00068b20 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.801197] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Releasing lock "refresh_cache-bf44e269-0297-473e-b6ce-04a40d0ec1b4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.804581] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd6b090a-0bb1-40aa-897f-f2f7f5a0cd03 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.878373] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415934, 'name': CreateVM_Task, 'duration_secs': 0.433935} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.881666] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1020.882401] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1020.882594] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.882972] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1020.883616] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49e61807-7f5f-4450-8f0b-625e4671459f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.888883] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415935, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091396} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.890243] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1020.890611] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1020.890611] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520cfded-c37f-6c55-ae34-8c71b2f7bf80" [ 1020.890611] env[62522]: _type = "Task" [ 1020.890611] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.891369] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f475fa3-019b-4cae-ac74-38a710272d31 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.905129] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520cfded-c37f-6c55-ae34-8c71b2f7bf80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.921308] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 921c14c9-27fa-4eda-9831-6263ad0d6c57/921c14c9-27fa-4eda-9831-6263ad0d6c57.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1020.921619] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a8ce157-f7da-4988-8c06-4f2cf621bba8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.942518] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1020.942518] env[62522]: value = "task-2415936" [ 1020.942518] env[62522]: _type = "Task" [ 1020.942518] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.951437] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415936, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.963956] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520af03a-92b3-faba-f425-ff80ec749fa8, 'name': SearchDatastore_Task, 'duration_secs': 0.014455} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.964787] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93fb3b65-795a-4046-8b81-52d89b0a0b4d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.970542] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1020.970542] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52980c24-1e89-3bad-fec7-d357fa2cc469" [ 1020.970542] env[62522]: _type = "Task" [ 1020.970542] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.979431] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52980c24-1e89-3bad-fec7-d357fa2cc469, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.191966] env[62522]: DEBUG nova.compute.manager [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1021.192238] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1021.193176] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59410f17-8af0-4888-ad8d-1216acb58f60 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.202180] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1021.202457] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4167fba3-3d98-44ae-9b79-c297921d762c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.213056] env[62522]: DEBUG oslo_vmware.api [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1021.213056] env[62522]: value = "task-2415937" [ 1021.213056] env[62522]: _type = "Task" [ 1021.213056] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.223111] env[62522]: DEBUG oslo_vmware.api [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415937, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.287073] env[62522]: DEBUG nova.network.neutron [req-f1f916e4-3aa2-4ffe-b8cf-f6ed784c2373 req-6270103e-07b2-4308-bb46-735f208f5e47 service nova] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Updated VIF entry in instance network info cache for port b498b6ac-e4c6-46e5-89f4-804206a74bcf. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1021.287073] env[62522]: DEBUG nova.network.neutron [req-f1f916e4-3aa2-4ffe-b8cf-f6ed784c2373 req-6270103e-07b2-4308-bb46-735f208f5e47 service nova] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Updating instance_info_cache with network_info: [{"id": "b498b6ac-e4c6-46e5-89f4-804206a74bcf", "address": "fa:16:3e:c8:3b:f8", "network": {"id": "302faf5b-b1e0-4048-920e-716cd79a6f7f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1208003028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "faee94ca03fc43149278a579ebe65682", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb498b6ac-e4", "ovs_interfaceid": "b498b6ac-e4c6-46e5-89f4-804206a74bcf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.300246] env[62522]: DEBUG oslo_concurrency.lockutils [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.583s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.300778] env[62522]: DEBUG nova.compute.manager [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1021.307729] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.609s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.307992] env[62522]: DEBUG nova.objects.instance [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Lazy-loading 'resources' on Instance uuid 895e6716-44cf-45b2-afd8-eaba71c32460 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1021.311250] env[62522]: DEBUG nova.compute.manager [req-b22bd623-6e79-416d-aab8-d62f6fc6f7a2 req-b0b155c3-4e05-412a-82d3-d7669943da3b service nova] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Received event network-vif-plugged-9e10cc19-76da-49d9-80b6-068ce128a1b0 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1021.311410] env[62522]: DEBUG oslo_concurrency.lockutils [req-b22bd623-6e79-416d-aab8-d62f6fc6f7a2 req-b0b155c3-4e05-412a-82d3-d7669943da3b service nova] Acquiring lock "cabe40a0-8bd0-4d77-b949-298bd194fa42-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.311617] env[62522]: DEBUG oslo_concurrency.lockutils [req-b22bd623-6e79-416d-aab8-d62f6fc6f7a2 req-b0b155c3-4e05-412a-82d3-d7669943da3b service nova] Lock "cabe40a0-8bd0-4d77-b949-298bd194fa42-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.311789] env[62522]: DEBUG oslo_concurrency.lockutils [req-b22bd623-6e79-416d-aab8-d62f6fc6f7a2 req-b0b155c3-4e05-412a-82d3-d7669943da3b service nova] Lock "cabe40a0-8bd0-4d77-b949-298bd194fa42-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.311956] env[62522]: DEBUG nova.compute.manager [req-b22bd623-6e79-416d-aab8-d62f6fc6f7a2 req-b0b155c3-4e05-412a-82d3-d7669943da3b service nova] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] No waiting events found dispatching network-vif-plugged-9e10cc19-76da-49d9-80b6-068ce128a1b0 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1021.312755] env[62522]: WARNING nova.compute.manager [req-b22bd623-6e79-416d-aab8-d62f6fc6f7a2 req-b0b155c3-4e05-412a-82d3-d7669943da3b service nova] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Received unexpected event network-vif-plugged-9e10cc19-76da-49d9-80b6-068ce128a1b0 for instance with vm_state building and task_state spawning. [ 1021.408608] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520cfded-c37f-6c55-ae34-8c71b2f7bf80, 'name': SearchDatastore_Task, 'duration_secs': 0.013785} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.408608] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1021.408608] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1021.408608] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1021.408608] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.408608] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1021.408608] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-29a6d140-c25a-440d-968c-57868dc44e4b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.416948] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1021.417547] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1021.419026] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7f3f040-f63d-43b9-ba5d-dd0652bf010e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.429131] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1021.429131] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d065c6-07b9-cc1d-e176-60f7e5ed0a66" [ 1021.429131] env[62522]: _type = "Task" [ 1021.429131] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.440035] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d065c6-07b9-cc1d-e176-60f7e5ed0a66, 'name': SearchDatastore_Task, 'duration_secs': 0.011248} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.442823] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d72920a9-dda6-4163-a746-7dca06bc1ac0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.460629] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415936, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.463205] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1021.463205] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5261694e-4b27-8a7f-85cf-94fd847f1a05" [ 1021.463205] env[62522]: _type = "Task" [ 1021.463205] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.473505] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5261694e-4b27-8a7f-85cf-94fd847f1a05, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.486222] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52980c24-1e89-3bad-fec7-d357fa2cc469, 'name': SearchDatastore_Task, 'duration_secs': 0.013579} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.486549] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1021.486868] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 4e9436df-c86b-429b-abc2-97f760858055/4e9436df-c86b-429b-abc2-97f760858055.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1021.487202] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c0e6a405-04ef-4326-b374-8488b0e546e5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.496071] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1021.496071] env[62522]: value = "task-2415938" [ 1021.496071] env[62522]: _type = "Task" [ 1021.496071] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.505694] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415938, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.542372] env[62522]: DEBUG nova.network.neutron [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Successfully updated port: 9e10cc19-76da-49d9-80b6-068ce128a1b0 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1021.724606] env[62522]: DEBUG oslo_vmware.api [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415937, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.789603] env[62522]: DEBUG oslo_concurrency.lockutils [req-f1f916e4-3aa2-4ffe-b8cf-f6ed784c2373 req-6270103e-07b2-4308-bb46-735f208f5e47 service nova] Releasing lock "refresh_cache-97f4c6ab-04de-4069-8ce0-1509c30ffb0f" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1021.816576] env[62522]: DEBUG nova.compute.utils [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1021.822551] env[62522]: DEBUG nova.compute.manager [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1021.822646] env[62522]: DEBUG nova.network.neutron [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1021.904769] env[62522]: DEBUG nova.policy [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9694ee575d094ccf845eb57acf3e70c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '00b27498c07344d1bf9cecefa0fca033', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1021.966918] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415936, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.984964] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5261694e-4b27-8a7f-85cf-94fd847f1a05, 'name': SearchDatastore_Task, 'duration_secs': 0.015797} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.988492] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1021.988809] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 97f4c6ab-04de-4069-8ce0-1509c30ffb0f/97f4c6ab-04de-4069-8ce0-1509c30ffb0f.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1021.989423] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-62d5553b-ef76-42cc-b481-7d8384f2e3ea {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.006616] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1022.006616] env[62522]: value = "task-2415939" [ 1022.006616] env[62522]: _type = "Task" [ 1022.006616] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.013646] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415938, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.023511] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415939, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.044830] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "refresh_cache-cabe40a0-8bd0-4d77-b949-298bd194fa42" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1022.045071] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired lock "refresh_cache-cabe40a0-8bd0-4d77-b949-298bd194fa42" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.045328] env[62522]: DEBUG nova.network.neutron [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1022.229488] env[62522]: DEBUG oslo_vmware.api [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415937, 'name': PowerOffVM_Task, 'duration_secs': 0.744859} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.229816] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1022.230123] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1022.230433] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b17bdf21-1eb1-445b-8f98-e00414236ec0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.269249] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b40d5e7c-2e19-4761-beea-7ca7fe62b429 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.278416] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-547d1869-44de-4657-a4d5-4d69d638c344 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.319242] env[62522]: DEBUG nova.network.neutron [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Successfully created port: 6adfce51-a4d5-4682-bee9-e6bea918aa38 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1022.322523] env[62522]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789b80d3-4983-4524-ae95-463d3092a3b6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.326286] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1022.326286] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1022.326286] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Deleting the datastore file [datastore2] 4e27a87c-4891-4e69-a6fa-312b026bf11e {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1022.326847] env[62522]: DEBUG nova.compute.manager [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1022.330691] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-11faad9c-985b-4e5b-9592-5761bb0bcc57 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.342031] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1022.342422] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-63e45b9c-4144-4b02-b38a-0f13ae23b28e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.345815] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-450703d6-5561-4029-aea2-23aaa238cad4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.354025] env[62522]: DEBUG oslo_vmware.api [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1022.354025] env[62522]: value = "task-2415941" [ 1022.354025] env[62522]: _type = "Task" [ 1022.354025] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.364989] env[62522]: DEBUG nova.compute.provider_tree [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1022.371034] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 1022.371034] env[62522]: value = "task-2415942" [ 1022.371034] env[62522]: _type = "Task" [ 1022.371034] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.376103] env[62522]: DEBUG oslo_vmware.api [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415941, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.385545] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415942, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.461390] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415936, 'name': ReconfigVM_Task, 'duration_secs': 1.161103} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.461857] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 921c14c9-27fa-4eda-9831-6263ad0d6c57/921c14c9-27fa-4eda-9831-6263ad0d6c57.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1022.462796] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bcdb43ff-b1ee-4afa-a34f-ec0104222412 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.473482] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1022.473482] env[62522]: value = "task-2415943" [ 1022.473482] env[62522]: _type = "Task" [ 1022.473482] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.488158] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415943, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.508715] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415938, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.581673} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.512770] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 4e9436df-c86b-429b-abc2-97f760858055/4e9436df-c86b-429b-abc2-97f760858055.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1022.513062] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1022.513793] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-73b6d631-a8c9-4d84-9326-feb1b5c08dbb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.523140] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415939, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.524371] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1022.524371] env[62522]: value = "task-2415944" [ 1022.524371] env[62522]: _type = "Task" [ 1022.524371] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.535445] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415944, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.601959] env[62522]: DEBUG nova.network.neutron [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1022.797450] env[62522]: DEBUG nova.network.neutron [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Updating instance_info_cache with network_info: [{"id": "9e10cc19-76da-49d9-80b6-068ce128a1b0", "address": "fa:16:3e:3f:35:de", "network": {"id": "70e81afa-0eda-49c5-b072-e79b3c287468", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1715169983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff5da278d2be4ca983424c8291beadec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e10cc19-76", "ovs_interfaceid": "9e10cc19-76da-49d9-80b6-068ce128a1b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.863588] env[62522]: DEBUG oslo_vmware.api [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415941, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.870019] env[62522]: DEBUG nova.scheduler.client.report [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1022.891590] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415942, 'name': PowerOffVM_Task, 'duration_secs': 0.285632} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.891902] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1022.892765] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c66c8dca-3ff1-4cff-89c4-3e96b3de9f9e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.916266] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-953aed1c-aa9e-42eb-bc22-091bfa5afc52 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.954451] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1022.954759] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7274931d-ff74-44a8-8aaf-b9c8f4db8f45 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.962900] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 1022.962900] env[62522]: value = "task-2415945" [ 1022.962900] env[62522]: _type = "Task" [ 1022.962900] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.971414] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415945, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.982970] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415943, 'name': Rename_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.018561] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415939, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.603126} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.018822] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 97f4c6ab-04de-4069-8ce0-1509c30ffb0f/97f4c6ab-04de-4069-8ce0-1509c30ffb0f.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1023.019120] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1023.019396] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e2026b08-5052-47bb-bdaa-2b002b567cf6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.026517] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1023.026517] env[62522]: value = "task-2415946" [ 1023.026517] env[62522]: _type = "Task" [ 1023.026517] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.039798] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415944, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.103449} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.043121] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1023.043870] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415946, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.044647] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3365021-66f0-4e5d-8e59-7ce861c368e2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.070401] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 4e9436df-c86b-429b-abc2-97f760858055/4e9436df-c86b-429b-abc2-97f760858055.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1023.070678] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e630ff4-1813-4b25-9402-7dc167400d45 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.093753] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1023.093753] env[62522]: value = "task-2415947" [ 1023.093753] env[62522]: _type = "Task" [ 1023.093753] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.106173] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415947, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.299074] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Releasing lock "refresh_cache-cabe40a0-8bd0-4d77-b949-298bd194fa42" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1023.299480] env[62522]: DEBUG nova.compute.manager [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Instance network_info: |[{"id": "9e10cc19-76da-49d9-80b6-068ce128a1b0", "address": "fa:16:3e:3f:35:de", "network": {"id": "70e81afa-0eda-49c5-b072-e79b3c287468", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1715169983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff5da278d2be4ca983424c8291beadec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e10cc19-76", "ovs_interfaceid": "9e10cc19-76da-49d9-80b6-068ce128a1b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1023.299938] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:35:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7654928b-7afe-42e3-a18d-68ecc775cefe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e10cc19-76da-49d9-80b6-068ce128a1b0', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1023.307869] env[62522]: DEBUG oslo.service.loopingcall [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1023.308601] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1023.308601] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e8db32fd-0238-41fa-a07d-a8931ca1ad86 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.330757] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1023.330757] env[62522]: value = "task-2415948" [ 1023.330757] env[62522]: _type = "Task" [ 1023.330757] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.340551] env[62522]: DEBUG nova.compute.manager [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1023.342629] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415948, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.366163] env[62522]: DEBUG oslo_vmware.api [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415941, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.547579} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.368529] env[62522]: DEBUG nova.virt.hardware [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1023.368813] env[62522]: DEBUG nova.virt.hardware [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1023.368971] env[62522]: DEBUG nova.virt.hardware [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1023.369247] env[62522]: 
DEBUG nova.virt.hardware [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1023.369439] env[62522]: DEBUG nova.virt.hardware [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1023.369604] env[62522]: DEBUG nova.virt.hardware [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1023.369815] env[62522]: DEBUG nova.virt.hardware [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1023.369978] env[62522]: DEBUG nova.virt.hardware [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1023.370158] env[62522]: DEBUG nova.virt.hardware [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1023.370323] env[62522]: DEBUG nova.virt.hardware [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1023.370492] env[62522]: DEBUG nova.virt.hardware [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1023.370795] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1023.370969] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1023.371188] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Instance destroyed {{(pid=62522) destroy 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1023.371367] env[62522]: INFO nova.compute.manager [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Took 2.18 seconds to destroy the instance on the hypervisor. [ 1023.371607] env[62522]: DEBUG oslo.service.loopingcall [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1023.372386] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57eb0662-1492-4d55-8443-43aa84a67408 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.375014] env[62522]: DEBUG nova.compute.manager [-] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1023.375133] env[62522]: DEBUG nova.network.neutron [-] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1023.379038] env[62522]: DEBUG nova.compute.manager [req-32cd8855-817e-460f-b548-9d9836a79595 req-1df65e81-367d-40f7-aaa5-f76ccc546088 service nova] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Received event network-changed-9e10cc19-76da-49d9-80b6-068ce128a1b0 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1023.379245] env[62522]: DEBUG nova.compute.manager [req-32cd8855-817e-460f-b548-9d9836a79595 req-1df65e81-367d-40f7-aaa5-f76ccc546088 service nova] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Refreshing instance network info cache due to event network-changed-9e10cc19-76da-49d9-80b6-068ce128a1b0.
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1023.379583] env[62522]: DEBUG oslo_concurrency.lockutils [req-32cd8855-817e-460f-b548-9d9836a79595 req-1df65e81-367d-40f7-aaa5-f76ccc546088 service nova] Acquiring lock "refresh_cache-cabe40a0-8bd0-4d77-b949-298bd194fa42" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1023.379914] env[62522]: DEBUG oslo_concurrency.lockutils [req-32cd8855-817e-460f-b548-9d9836a79595 req-1df65e81-367d-40f7-aaa5-f76ccc546088 service nova] Acquired lock "refresh_cache-cabe40a0-8bd0-4d77-b949-298bd194fa42" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.380475] env[62522]: DEBUG nova.network.neutron [req-32cd8855-817e-460f-b548-9d9836a79595 req-1df65e81-367d-40f7-aaa5-f76ccc546088 service nova] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Refreshing network info cache for port 9e10cc19-76da-49d9-80b6-068ce128a1b0 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1023.385630] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.077s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.387297] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.073s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1023.387441] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.389210] env[62522]: DEBUG oslo_concurrency.lockutils [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.600s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1023.390642] env[62522]: INFO nova.compute.claims [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1023.394442] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-160f4310-b7c1-44e6-b583-7381222ec596 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.423634] env[62522]: INFO nova.scheduler.client.report [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 
tempest-ServerDiskConfigTestJSON-536235198-project-member] Deleted allocations for instance 917469c5-20be-4814-814f-a042415be021 [ 1023.425345] env[62522]: INFO nova.scheduler.client.report [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Deleted allocations for instance 895e6716-44cf-45b2-afd8-eaba71c32460 [ 1023.461214] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52978378-76f7-07fa-d339-45478cbf7ce3/disk-0.vmdk. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1023.462546] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afef046e-1785-432a-9b62-067b2a618da9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.477209] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52978378-76f7-07fa-d339-45478cbf7ce3/disk-0.vmdk is in state: ready. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1023.477209] env[62522]: ERROR oslo_vmware.rw_handles [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52978378-76f7-07fa-d339-45478cbf7ce3/disk-0.vmdk due to incomplete transfer. 
[ 1023.484532] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-36d5297c-6144-42f4-a380-464616ee2d39 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.486760] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] VM already powered off {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1023.487146] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1023.487604] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1023.487975] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.488363] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1023.490492] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b6b27913-8857-40c3-a024-88ddd30c0c62 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.496957] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415943, 'name': Rename_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.498754] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52978378-76f7-07fa-d339-45478cbf7ce3/disk-0.vmdk. 
{{(pid=62522) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1023.499170] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Uploaded image f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8 to the Glance image server {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1023.501490] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Destroying the VM {{(pid=62522) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1023.502195] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c483d7a5-c608-4d86-968d-1e644165bb6d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.505502] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1023.505816] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1023.507025] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0f70d3a-584a-43cf-96de-081940a4cc16 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.510692] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 1023.510692] env[62522]: value = "task-2415949" [ 1023.510692] env[62522]: _type = "Task" [ 1023.510692] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.516589] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 1023.516589] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521bac2a-6207-577f-4048-ef06406b74c2" [ 1023.516589] env[62522]: _type = "Task" [ 1023.516589] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.524569] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415949, 'name': Destroy_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.530173] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521bac2a-6207-577f-4048-ef06406b74c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.540017] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415946, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072419} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.540380] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1023.541195] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0513795-a8be-4e75-8284-c5b9bfdfcc72 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.567315] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] 97f4c6ab-04de-4069-8ce0-1509c30ffb0f/97f4c6ab-04de-4069-8ce0-1509c30ffb0f.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1023.567653] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91168157-4ab4-4392-892a-035d221b827f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.590807] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1023.590807] env[62522]: value = "task-2415950" [ 1023.590807] env[62522]: _type = "Task" [ 1023.590807] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.603522] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415950, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.607021] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415947, 'name': ReconfigVM_Task, 'duration_secs': 0.389901} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.607021] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 4e9436df-c86b-429b-abc2-97f760858055/4e9436df-c86b-429b-abc2-97f760858055.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1023.607490] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0e7b6d73-a209-46ce-beba-b5328cd2a693 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.617117] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1023.617117] env[62522]: value = "task-2415951" [ 1023.617117] env[62522]: _type = "Task" [ 1023.617117] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.627134] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415951, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.728397] env[62522]: DEBUG nova.compute.manager [req-55d35518-888d-4f54-bd60-64e16906feb6 req-91c57ee0-71c2-47d0-a095-e53a6fe67f8d service nova] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Received event network-vif-deleted-f98850e9-37f2-496a-8b2e-590c3d8b7f5e {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1023.728397] env[62522]: INFO nova.compute.manager [req-55d35518-888d-4f54-bd60-64e16906feb6 req-91c57ee0-71c2-47d0-a095-e53a6fe67f8d service nova] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Neutron deleted interface f98850e9-37f2-496a-8b2e-590c3d8b7f5e; detaching it from the instance and deleting it from the info cache [ 1023.728750] env[62522]: DEBUG nova.network.neutron [req-55d35518-888d-4f54-bd60-64e16906feb6 req-91c57ee0-71c2-47d0-a095-e53a6fe67f8d service nova] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.841411] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415948, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.937346] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fa085f28-b3b5-4d63-a19a-a9a819e80b50 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "917469c5-20be-4814-814f-a042415be021" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 14.134s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.938667] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c9a687ce-1be7-498e-8fa1-298298464cc8 tempest-ServerTagsTestJSON-1702556494 tempest-ServerTagsTestJSON-1702556494-project-member] Lock "895e6716-44cf-45b2-afd8-eaba71c32460" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 15.199s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.986443] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415943, 'name': Rename_Task, 'duration_secs': 1.252129} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.987154] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1023.987477] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8dfd1f92-461b-4555-b269-ae0f2924f974 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.994424] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1023.994424] env[62522]: value = "task-2415952" [ 1023.994424] env[62522]: _type = "Task" [ 1023.994424] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.004348] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415952, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.027784] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415949, 'name': Destroy_Task, 'duration_secs': 0.444098} completed successfully.
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.028732] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Destroyed the VM [ 1024.029193] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Deleting Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1024.029454] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-37000904-975e-4f1a-9ca1-98415d9f4873 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.040432] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521bac2a-6207-577f-4048-ef06406b74c2, 'name': SearchDatastore_Task, 'duration_secs': 0.015146} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.042376] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3df13f8-e2f4-4c17-9b72-53569ce7efc0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.047442] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 1024.047442] env[62522]: value = "task-2415953" [ 1024.047442] env[62522]: _type = "Task" [ 1024.047442] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.049107] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 1024.049107] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52559952-25f9-7eb2-ce70-90f9d42976c5" [ 1024.049107] env[62522]: _type = "Task" [ 1024.049107] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.064672] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415953, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.072172] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52559952-25f9-7eb2-ce70-90f9d42976c5, 'name': SearchDatastore_Task, 'duration_secs': 0.0096} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.072518] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.072893] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] bf44e269-0297-473e-b6ce-04a40d0ec1b4/2ee4561b-ba48-4f45-82f6-eac89be98290-rescue.vmdk. {{(pid=62522) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1024.073260] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-17c33bab-9606-4642-b164-65d3c0e79aca {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.085061] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 1024.085061] env[62522]: value = "task-2415954" [ 1024.085061] env[62522]: _type = "Task" [ 1024.085061] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.100717] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415950, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.103782] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415954, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.114171] env[62522]: DEBUG nova.network.neutron [req-32cd8855-817e-460f-b548-9d9836a79595 req-1df65e81-367d-40f7-aaa5-f76ccc546088 service nova] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Updated VIF entry in instance network info cache for port 9e10cc19-76da-49d9-80b6-068ce128a1b0. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1024.114477] env[62522]: DEBUG nova.network.neutron [req-32cd8855-817e-460f-b548-9d9836a79595 req-1df65e81-367d-40f7-aaa5-f76ccc546088 service nova] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Updating instance_info_cache with network_info: [{"id": "9e10cc19-76da-49d9-80b6-068ce128a1b0", "address": "fa:16:3e:3f:35:de", "network": {"id": "70e81afa-0eda-49c5-b072-e79b3c287468", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1715169983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff5da278d2be4ca983424c8291beadec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e10cc19-76", "ovs_interfaceid": "9e10cc19-76da-49d9-80b6-068ce128a1b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.126364] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415951, 'name': Rename_Task, 'duration_secs': 0.284603} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.127237] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1024.127489] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-465752a5-35c9-4ae8-9280-30d2b13c74b0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.135588] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1024.135588] env[62522]: value = "task-2415955" [ 1024.135588] env[62522]: _type = "Task" [ 1024.135588] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.144128] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415955, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.209694] env[62522]: DEBUG nova.network.neutron [-] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.231281] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c36c0df1-a032-4232-89fc-0351f8d611ad {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.242208] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-186aa80a-28f7-4cc4-b67a-b3d032d258c7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.284028] env[62522]: DEBUG nova.compute.manager [req-55d35518-888d-4f54-bd60-64e16906feb6 req-91c57ee0-71c2-47d0-a095-e53a6fe67f8d service nova] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Detach interface failed, port_id=f98850e9-37f2-496a-8b2e-590c3d8b7f5e, reason: Instance 4e27a87c-4891-4e69-a6fa-312b026bf11e could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1024.342945] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415948, 'name': CreateVM_Task, 'duration_secs': 0.528659} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.343249] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1024.343774] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.343941] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.344324] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1024.344592] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acd97497-a4ba-4c15-b56a-419e106d2070 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.350187] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 
1024.350187] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a08a33-4c7e-2bce-29f1-42cb603d6573" [ 1024.350187] env[62522]: _type = "Task" [ 1024.350187] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.360015] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a08a33-4c7e-2bce-29f1-42cb603d6573, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.459852] env[62522]: DEBUG nova.network.neutron [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Successfully updated port: 6adfce51-a4d5-4682-bee9-e6bea918aa38 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1024.507275] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415952, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.561021] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415953, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.600141] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415954, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.608467] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415950, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.621026] env[62522]: DEBUG oslo_concurrency.lockutils [req-32cd8855-817e-460f-b548-9d9836a79595 req-1df65e81-367d-40f7-aaa5-f76ccc546088 service nova] Releasing lock "refresh_cache-cabe40a0-8bd0-4d77-b949-298bd194fa42" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.648275] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415955, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.712965] env[62522]: INFO nova.compute.manager [-] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Took 1.34 seconds to deallocate network for instance. 
[ 1024.757410] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a22178aa-be7f-41c9-80f0-2a5e8556f29d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.768373] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4363f4a-8e17-43b3-8e43-b8c7ff5b1dc5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.812493] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-961013e9-6c82-4e4b-bd5a-8b5c1419dd8d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.823756] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daab310c-1841-4732-a020-23493e0a2fb2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.842730] env[62522]: DEBUG nova.compute.provider_tree [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1024.861732] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a08a33-4c7e-2bce-29f1-42cb603d6573, 'name': SearchDatastore_Task, 'duration_secs': 0.010636} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.862043] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.862313] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1024.862550] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.862986] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.862986] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1024.863261] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-16222a39-a3e0-4184-90a0-da345608f749 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.874272] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1024.874460] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1024.875567] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a43989fe-cc71-4e8d-9717-1a867809fe8b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.884471] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1024.884471] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b61c5d-5906-ee36-492d-cb3fd7b7f8a7" [ 1024.884471] env[62522]: _type = "Task" [ 1024.884471] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.896759] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b61c5d-5906-ee36-492d-cb3fd7b7f8a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.965032] env[62522]: DEBUG oslo_concurrency.lockutils [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "refresh_cache-3b2cd0b6-0c7a-411c-a7f5-64835f2179dc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.965032] env[62522]: DEBUG oslo_concurrency.lockutils [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired lock "refresh_cache-3b2cd0b6-0c7a-411c-a7f5-64835f2179dc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.965032] env[62522]: DEBUG nova.network.neutron [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1025.009227] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415952, 'name': PowerOnVM_Task, 'duration_secs': 0.845899} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.010047] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1025.010160] env[62522]: INFO nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Took 11.29 seconds to spawn the instance on the hypervisor. 
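The `Acquiring lock ...` / `Lock ... acquired ... waited 0.000s` / `"released" ... held N.NNNs` entries in this stretch come from oslo.concurrency's lockutils, which Nova uses to serialize access to shared state such as the image-cache directory and the per-instance network-info cache. A hedged sketch of both usage forms, with placeholder lock names modelled on the ones above:

```python
# Sketch of the oslo.concurrency locking that produces the "Acquiring lock" /
# "acquired" / "released" DEBUG lines. Lock names below are placeholders.
from oslo_concurrency import lockutils

@lockutils.synchronized('[datastore2] devstack-image-cache_base/<image-id>')
def touch_cached_image():
    # Only one worker thread enters this critical section per lock name; the
    # waited/held durations seen in the log are measured around this block.
    pass

# The same primitive is also available as a context manager:
with lockutils.lock('refresh_cache-<instance-uuid>'):
    pass
```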
[ 1025.011174] env[62522]: DEBUG nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1025.011576] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e016f7ca-e146-4d7c-9f6b-33ae50468595 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.061534] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415953, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.100391] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415954, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.107669] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415950, 'name': ReconfigVM_Task, 'duration_secs': 1.448262} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.107954] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Reconfigured VM instance instance-00000055 to attach disk [datastore2] 97f4c6ab-04de-4069-8ce0-1509c30ffb0f/97f4c6ab-04de-4069-8ce0-1509c30ffb0f.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1025.108664] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-afb32b9d-220f-42d9-9cb3-c314d7e4d745 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.117076] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1025.117076] env[62522]: value = "task-2415956" [ 1025.117076] env[62522]: _type = "Task" [ 1025.117076] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.128239] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415956, 'name': Rename_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.149833] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415955, 'name': PowerOnVM_Task, 'duration_secs': 0.705524} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.150153] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1025.150371] env[62522]: INFO nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Took 8.93 seconds to spawn the instance on the hypervisor. [ 1025.151021] env[62522]: DEBUG nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1025.151506] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60dbf14-9652-4c9b-9a62-7d77f73f4fed {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.221194] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.348218] env[62522]: DEBUG nova.scheduler.client.report [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1025.400239] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b61c5d-5906-ee36-492d-cb3fd7b7f8a7, 'name': SearchDatastore_Task, 'duration_secs': 0.028285} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.401318] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea7b82f8-9ea2-4b24-91cd-d2b118b57f13 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.408620] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1025.408620] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f38bd9-cfcd-9372-691f-9a152614c215" [ 1025.408620] env[62522]: _type = "Task" [ 1025.408620] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.416109] env[62522]: DEBUG nova.compute.manager [req-5ae1eb81-7a3a-41f3-9df7-383209e4d4a6 req-86db5ac2-2fe7-4688-b8cb-206e28ed6f25 service nova] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Received event network-vif-plugged-6adfce51-a4d5-4682-bee9-e6bea918aa38 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1025.416109] env[62522]: DEBUG oslo_concurrency.lockutils [req-5ae1eb81-7a3a-41f3-9df7-383209e4d4a6 req-86db5ac2-2fe7-4688-b8cb-206e28ed6f25 service nova] Acquiring lock "3b2cd0b6-0c7a-411c-a7f5-64835f2179dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.416340] env[62522]: DEBUG oslo_concurrency.lockutils [req-5ae1eb81-7a3a-41f3-9df7-383209e4d4a6 req-86db5ac2-2fe7-4688-b8cb-206e28ed6f25 service nova] Lock "3b2cd0b6-0c7a-411c-a7f5-64835f2179dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.416517] env[62522]: DEBUG oslo_concurrency.lockutils [req-5ae1eb81-7a3a-41f3-9df7-383209e4d4a6 req-86db5ac2-2fe7-4688-b8cb-206e28ed6f25 service nova] Lock "3b2cd0b6-0c7a-411c-a7f5-64835f2179dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.416713] env[62522]: DEBUG nova.compute.manager [req-5ae1eb81-7a3a-41f3-9df7-383209e4d4a6 req-86db5ac2-2fe7-4688-b8cb-206e28ed6f25 service nova] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] No waiting events found dispatching network-vif-plugged-6adfce51-a4d5-4682-bee9-e6bea918aa38 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1025.416900] env[62522]: WARNING nova.compute.manager [req-5ae1eb81-7a3a-41f3-9df7-383209e4d4a6 req-86db5ac2-2fe7-4688-b8cb-206e28ed6f25 service nova] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Received unexpected event network-vif-plugged-6adfce51-a4d5-4682-bee9-e6bea918aa38 for instance with vm_state building and task_state spawning. 
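The `CopyVirtualDisk_Task` entries around here (the rescue-disk copy above and the cache-to-instance copy just below) go through vSphere's VirtualDiskManager, after which the log shows the root disk being extended (`ExtendVirtualDisk_Task`) and attached (`ReconfigVM_Task`). A hedged sketch of the copy call, reusing the `session` object from the earlier sketch; both datastore paths are placeholders:

```python
# Hedged sketch of the datastore-to-datastore copy traced by CopyVirtualDisk_Task.
# Reuses `session` from the earlier sketch; both vmdk paths are placeholders.
disk_mgr = session.vim.service_content.virtualDiskManager

copy_task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName='[datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk',
    destName='[datastore2] <instance-uuid>/<instance-uuid>.vmdk')
session.wait_for_task(copy_task)
```

Extending and attaching the copied disk follow the same invoke_api / wait_for_task shape, with `ExtendVirtualDisk_Task` and `ReconfigVM_Task` respectively, as the surrounding entries show.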
[ 1025.417084] env[62522]: DEBUG nova.compute.manager [req-5ae1eb81-7a3a-41f3-9df7-383209e4d4a6 req-86db5ac2-2fe7-4688-b8cb-206e28ed6f25 service nova] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Received event network-changed-6adfce51-a4d5-4682-bee9-e6bea918aa38 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1025.417261] env[62522]: DEBUG nova.compute.manager [req-5ae1eb81-7a3a-41f3-9df7-383209e4d4a6 req-86db5ac2-2fe7-4688-b8cb-206e28ed6f25 service nova] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Refreshing instance network info cache due to event network-changed-6adfce51-a4d5-4682-bee9-e6bea918aa38. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1025.417427] env[62522]: DEBUG oslo_concurrency.lockutils [req-5ae1eb81-7a3a-41f3-9df7-383209e4d4a6 req-86db5ac2-2fe7-4688-b8cb-206e28ed6f25 service nova] Acquiring lock "refresh_cache-3b2cd0b6-0c7a-411c-a7f5-64835f2179dc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1025.429170] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f38bd9-cfcd-9372-691f-9a152614c215, 'name': SearchDatastore_Task, 'duration_secs': 0.012502} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.429724] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1025.429982] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] cabe40a0-8bd0-4d77-b949-298bd194fa42/cabe40a0-8bd0-4d77-b949-298bd194fa42.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1025.430286] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-807d5feb-f765-453e-b38e-7e760eedda9e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.439875] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1025.439875] env[62522]: value = "task-2415957" [ 1025.439875] env[62522]: _type = "Task" [ 1025.439875] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.448818] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415957, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.528097] env[62522]: DEBUG nova.network.neutron [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1025.535112] env[62522]: INFO nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Took 16.40 seconds to build instance. [ 1025.564976] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415953, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.600292] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415954, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.341552} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.600565] env[62522]: INFO nova.virt.vmwareapi.ds_util [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] bf44e269-0297-473e-b6ce-04a40d0ec1b4/2ee4561b-ba48-4f45-82f6-eac89be98290-rescue.vmdk. [ 1025.602466] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ab2e15-d5d3-405c-9592-e5af75776a42 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.637320] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] bf44e269-0297-473e-b6ce-04a40d0ec1b4/2ee4561b-ba48-4f45-82f6-eac89be98290-rescue.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1025.643461] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9aa2e246-00b8-4bee-b007-d56fba5f0e96 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.672640] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415956, 'name': Rename_Task, 'duration_secs': 0.269177} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.676815] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1025.676815] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 1025.676815] env[62522]: value = "task-2415958" [ 1025.676815] env[62522]: _type = "Task" [ 1025.676815] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.676815] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1a5b1548-21a8-4230-8987-0833a540a937 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.680482] env[62522]: INFO nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Took 16.51 seconds to build instance. [ 1025.694556] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415958, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.697031] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1025.697031] env[62522]: value = "task-2415959" [ 1025.697031] env[62522]: _type = "Task" [ 1025.697031] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.710441] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415959, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.845603] env[62522]: DEBUG nova.network.neutron [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Updating instance_info_cache with network_info: [{"id": "6adfce51-a4d5-4682-bee9-e6bea918aa38", "address": "fa:16:3e:ad:e9:03", "network": {"id": "2037da36-cd13-4cb1-95f4-08d1e174336c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1895994075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00b27498c07344d1bf9cecefa0fca033", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6adfce51-a4", "ovs_interfaceid": "6adfce51-a4d5-4682-bee9-e6bea918aa38", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.857180] env[62522]: DEBUG oslo_concurrency.lockutils [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.468s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.858059] env[62522]: DEBUG nova.compute.manager [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1025.860560] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.640s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.860812] env[62522]: DEBUG nova.objects.instance [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lazy-loading 'resources' on Instance uuid 4e27a87c-4891-4e69-a6fa-312b026bf11e {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1025.955273] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415957, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503814} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.955410] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] cabe40a0-8bd0-4d77-b949-298bd194fa42/cabe40a0-8bd0-4d77-b949-298bd194fa42.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1025.955595] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1025.955860] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7c973ea7-d0d0-4a34-afbf-8fe3460be7e1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.964040] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1025.964040] env[62522]: value = "task-2415960" [ 1025.964040] env[62522]: _type = "Task" [ 1025.964040] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.973970] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415960, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.037231] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "921c14c9-27fa-4eda-9831-6263ad0d6c57" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.911s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.063185] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415953, 'name': RemoveSnapshot_Task, 'duration_secs': 1.566905} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.063185] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Deleted Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1026.063185] env[62522]: DEBUG nova.compute.manager [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1026.063647] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66399644-d963-4783-ae4a-2f5b9fcbd3b8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.186870] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "4e9436df-c86b-429b-abc2-97f760858055" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.021s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.191908] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415958, 'name': ReconfigVM_Task, 'duration_secs': 0.49384} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.191908] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Reconfigured VM instance instance-00000033 to attach disk [datastore1] bf44e269-0297-473e-b6ce-04a40d0ec1b4/2ee4561b-ba48-4f45-82f6-eac89be98290-rescue.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1026.191908] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1cb1e7e-4f67-4f62-9579-16d9e1f50bd7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.225219] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db354e98-7da0-4eb4-bc46-723b95f3443a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.242648] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415959, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.244266] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 1026.244266] env[62522]: value = "task-2415961" [ 1026.244266] env[62522]: _type = "Task" [ 1026.244266] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.254158] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415961, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.348226] env[62522]: DEBUG oslo_concurrency.lockutils [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Releasing lock "refresh_cache-3b2cd0b6-0c7a-411c-a7f5-64835f2179dc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1026.348661] env[62522]: DEBUG nova.compute.manager [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Instance network_info: |[{"id": "6adfce51-a4d5-4682-bee9-e6bea918aa38", "address": "fa:16:3e:ad:e9:03", "network": {"id": "2037da36-cd13-4cb1-95f4-08d1e174336c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1895994075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00b27498c07344d1bf9cecefa0fca033", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6adfce51-a4", "ovs_interfaceid": "6adfce51-a4d5-4682-bee9-e6bea918aa38", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1026.349033] env[62522]: DEBUG oslo_concurrency.lockutils [req-5ae1eb81-7a3a-41f3-9df7-383209e4d4a6 req-86db5ac2-2fe7-4688-b8cb-206e28ed6f25 service nova] Acquired lock "refresh_cache-3b2cd0b6-0c7a-411c-a7f5-64835f2179dc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.349318] env[62522]: DEBUG nova.network.neutron [req-5ae1eb81-7a3a-41f3-9df7-383209e4d4a6 req-86db5ac2-2fe7-4688-b8cb-206e28ed6f25 service nova] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Refreshing network info cache for port 6adfce51-a4d5-4682-bee9-e6bea918aa38 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1026.350615] env[62522]: DEBUG 
nova.virt.vmwareapi.vmops [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:e9:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f65996a3-f865-4492-9377-cd14ec8b3aae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6adfce51-a4d5-4682-bee9-e6bea918aa38', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1026.358150] env[62522]: DEBUG oslo.service.loopingcall [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1026.359160] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1026.359505] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4fc60efc-aa53-4851-88d1-2ae2438672cd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.375740] env[62522]: DEBUG nova.compute.utils [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1026.380074] env[62522]: DEBUG nova.compute.manager [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1026.380258] env[62522]: DEBUG nova.network.neutron [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1026.390814] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1026.390814] env[62522]: value = "task-2415962" [ 1026.390814] env[62522]: _type = "Task" [ 1026.390814] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.402604] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415962, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.435009] env[62522]: DEBUG nova.policy [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '55ada09ff7054189ba8820a7b1963fd5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e1ca5dfb8f2d4b2e932679e017fe8b3e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1026.481630] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415960, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.135245} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.481983] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1026.483513] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5590ee1e-3a35-4328-8dac-658065e0b440 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.514903] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] cabe40a0-8bd0-4d77-b949-298bd194fa42/cabe40a0-8bd0-4d77-b949-298bd194fa42.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1026.518303] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57cc432e-a44c-4970-b53f-a1c509f6c202 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.544260] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1026.544260] env[62522]: value = "task-2415963" [ 1026.544260] env[62522]: _type = "Task" [ 1026.544260] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.555695] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415963, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.577551] env[62522]: INFO nova.compute.manager [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Shelve offloading [ 1026.728087] env[62522]: DEBUG oslo_vmware.api [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415959, 'name': PowerOnVM_Task, 'duration_secs': 0.810278} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.728087] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1026.728385] env[62522]: INFO nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Took 8.48 seconds to spawn the instance on the hypervisor. [ 1026.728706] env[62522]: DEBUG nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1026.732156] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b1dd0c3-69c1-42e9-9e2b-8a00281250ac {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.758789] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415961, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.816785] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be9baac6-282d-467e-93f2-1dce776b79f5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.826464] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e66b29e5-76a4-4888-90e3-2a5728dd5a9e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.858424] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e1883e-0a21-4869-884e-5abbe6cb8306 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.869432] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4fce690-7259-4d1d-a14b-1a5bfddb2fd6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.885848] env[62522]: DEBUG nova.compute.manager [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1026.888860] env[62522]: DEBUG nova.compute.provider_tree [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1026.905481] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415962, 'name': CreateVM_Task} progress is 25%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.917567] env[62522]: DEBUG nova.network.neutron [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Successfully created port: 78283962-9062-464a-b1f4-a2319257559b {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1027.057846] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415963, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.083835] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1027.084471] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-63e4a202-9cdb-4860-90f6-4ef39d8b5dd3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.094676] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 1027.094676] env[62522]: value = "task-2415964" [ 1027.094676] env[62522]: _type = "Task" [ 1027.094676] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.104286] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415964, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.105236] env[62522]: DEBUG nova.network.neutron [req-5ae1eb81-7a3a-41f3-9df7-383209e4d4a6 req-86db5ac2-2fe7-4688-b8cb-206e28ed6f25 service nova] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Updated VIF entry in instance network info cache for port 6adfce51-a4d5-4682-bee9-e6bea918aa38. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1027.106434] env[62522]: DEBUG nova.network.neutron [req-5ae1eb81-7a3a-41f3-9df7-383209e4d4a6 req-86db5ac2-2fe7-4688-b8cb-206e28ed6f25 service nova] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Updating instance_info_cache with network_info: [{"id": "6adfce51-a4d5-4682-bee9-e6bea918aa38", "address": "fa:16:3e:ad:e9:03", "network": {"id": "2037da36-cd13-4cb1-95f4-08d1e174336c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1895994075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00b27498c07344d1bf9cecefa0fca033", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6adfce51-a4", "ovs_interfaceid": "6adfce51-a4d5-4682-bee9-e6bea918aa38", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.256574] env[62522]: INFO nova.compute.manager [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Took 18.05 seconds to build instance. [ 1027.262668] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415961, 'name': ReconfigVM_Task, 'duration_secs': 0.658432} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.262668] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1027.262915] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b7a53343-8e0c-4a19-a1ce-6fed90931d59 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.274029] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 1027.274029] env[62522]: value = "task-2415965" [ 1027.274029] env[62522]: _type = "Task" [ 1027.274029] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.291159] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415965, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.396795] env[62522]: DEBUG nova.scheduler.client.report [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1027.413188] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415962, 'name': CreateVM_Task, 'duration_secs': 0.861772} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.413395] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1027.414208] env[62522]: DEBUG oslo_concurrency.lockutils [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1027.414370] env[62522]: DEBUG oslo_concurrency.lockutils [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.414745] env[62522]: DEBUG oslo_concurrency.lockutils [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1027.415024] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d0dd103-732c-43ce-81d0-0aa10f03af80 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.420489] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1027.420489] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c7c420-ed75-2093-bf44-725928528e3e" [ 1027.420489] env[62522]: _type = "Task" [ 1027.420489] env[62522]: } to 
complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.434042] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c7c420-ed75-2093-bf44-725928528e3e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.556239] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415963, 'name': ReconfigVM_Task, 'duration_secs': 0.860024} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.556592] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Reconfigured VM instance instance-00000056 to attach disk [datastore2] cabe40a0-8bd0-4d77-b949-298bd194fa42/cabe40a0-8bd0-4d77-b949-298bd194fa42.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1027.557300] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7edea6b2-8f13-4928-8bef-43f43b6f0197 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.566264] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1027.566264] env[62522]: value = "task-2415966" [ 1027.566264] env[62522]: _type = "Task" [ 1027.566264] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.582972] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415966, 'name': Rename_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.605397] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] VM already powered off {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1027.605659] env[62522]: DEBUG nova.compute.manager [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1027.606429] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c62cd79-a5d5-4839-a731-89cf2adc3a95 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.609454] env[62522]: DEBUG oslo_concurrency.lockutils [req-5ae1eb81-7a3a-41f3-9df7-383209e4d4a6 req-86db5ac2-2fe7-4688-b8cb-206e28ed6f25 service nova] Releasing lock "refresh_cache-3b2cd0b6-0c7a-411c-a7f5-64835f2179dc" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1027.613885] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquiring lock "refresh_cache-04a9d357-d094-487b-8f09-2f7e0c35f0d7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1027.614082] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquired lock "refresh_cache-04a9d357-d094-487b-8f09-2f7e0c35f0d7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.614283] env[62522]: DEBUG nova.network.neutron [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1027.759425] env[62522]: DEBUG oslo_concurrency.lockutils [None req-37f7fdfe-9e0b-47a5-ae23-66327dcdd4ba tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "97f4c6ab-04de-4069-8ce0-1509c30ffb0f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.565s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.785502] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415965, 'name': PowerOnVM_Task} progress is 74%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.903111] env[62522]: DEBUG nova.compute.manager [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1027.908191] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.048s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.935781] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c7c420-ed75-2093-bf44-725928528e3e, 'name': SearchDatastore_Task, 'duration_secs': 0.015335} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.938965] env[62522]: DEBUG nova.virt.hardware [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1027.939167] env[62522]: DEBUG nova.virt.hardware [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1027.939337] env[62522]: DEBUG nova.virt.hardware [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1027.939809] env[62522]: DEBUG nova.virt.hardware [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1027.940033] env[62522]: DEBUG nova.virt.hardware [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Image pref 0:0:0 {{(pid=62522) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1027.940234] env[62522]: DEBUG nova.virt.hardware [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1027.940702] env[62522]: DEBUG nova.virt.hardware [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1027.940769] env[62522]: DEBUG nova.virt.hardware [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1027.940969] env[62522]: DEBUG nova.virt.hardware [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1027.941458] env[62522]: DEBUG nova.virt.hardware [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1027.941910] env[62522]: DEBUG nova.virt.hardware [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1027.942483] env[62522]: DEBUG oslo_concurrency.lockutils [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1027.942772] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1027.943227] env[62522]: DEBUG oslo_concurrency.lockutils [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1027.943426] env[62522]: DEBUG oslo_concurrency.lockutils [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 
tempest-ServersTestJSON-990685860-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.943719] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1027.944709] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c2a2fa8-2c56-4763-b14b-1ef9dfdbe8cf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.948928] env[62522]: INFO nova.scheduler.client.report [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Deleted allocations for instance 4e27a87c-4891-4e69-a6fa-312b026bf11e [ 1027.949939] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f0883127-60a9-4bf7-87fe-450b55a34c86 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.966976] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8718c776-3c1f-4665-909a-c408ee042811 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.974702] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1027.974997] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1027.988022] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e4ba8e0-d880-49b3-8fab-f825fc0a47ad {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.997296] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1027.997296] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d1bba1-cabc-6a47-06c7-9e56711f9b51" [ 1027.997296] env[62522]: _type = "Task" [ 1027.997296] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.010526] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d1bba1-cabc-6a47-06c7-9e56711f9b51, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.077974] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415966, 'name': Rename_Task, 'duration_secs': 0.265623} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.078395] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1028.078752] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8e29e8e6-c87a-45c0-8ad7-d0ddd8b767a7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.088459] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1028.088459] env[62522]: value = "task-2415967" [ 1028.088459] env[62522]: _type = "Task" [ 1028.088459] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.102091] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415967, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.286177] env[62522]: DEBUG oslo_vmware.api [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415965, 'name': PowerOnVM_Task, 'duration_secs': 0.836123} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.286412] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1028.288921] env[62522]: DEBUG nova.compute.manager [None req-2a9db339-011f-41f3-b97c-fe472c43bab6 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1028.289771] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3ebf950-8cb9-4df3-b79c-547e78b6504c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.376027] env[62522]: DEBUG nova.network.neutron [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Updating instance_info_cache with network_info: [{"id": "7e36641e-fc4a-4223-ab07-33dc49821168", "address": "fa:16:3e:f1:bf:49", "network": {"id": "b837f0fb-c2e1-46dd-93b2-62d6c4352316", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1813744063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed393a0454b643eea75c203d1dfd592c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e36641e-fc", "ovs_interfaceid": "7e36641e-fc4a-4223-ab07-33dc49821168", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.465034] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3e174a6f-0bb5-4e93-bc19-8b4d221e6a46 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "4e27a87c-4891-4e69-a6fa-312b026bf11e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.782s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.512511] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d1bba1-cabc-6a47-06c7-9e56711f9b51, 'name': SearchDatastore_Task, 'duration_secs': 0.033313} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.513450] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40f0ffe3-be23-491a-a456-c774181ef3e1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.520377] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1028.520377] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52269d3f-3fd5-1efb-d3fa-5c768ba3290f" [ 1028.520377] env[62522]: _type = "Task" [ 1028.520377] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.527151] env[62522]: DEBUG nova.compute.manager [req-f2a9566a-2227-4c1d-89e9-812afef57ff7 req-359824e7-3740-4410-8d0f-2afc05a00c30 service nova] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Received event network-vif-plugged-78283962-9062-464a-b1f4-a2319257559b {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1028.527151] env[62522]: DEBUG oslo_concurrency.lockutils [req-f2a9566a-2227-4c1d-89e9-812afef57ff7 req-359824e7-3740-4410-8d0f-2afc05a00c30 service nova] Acquiring lock "5c9b1120-84ad-48d5-8cd4-0cf387963066-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.527151] env[62522]: DEBUG oslo_concurrency.lockutils [req-f2a9566a-2227-4c1d-89e9-812afef57ff7 req-359824e7-3740-4410-8d0f-2afc05a00c30 service nova] Lock "5c9b1120-84ad-48d5-8cd4-0cf387963066-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.527151] env[62522]: DEBUG oslo_concurrency.lockutils [req-f2a9566a-2227-4c1d-89e9-812afef57ff7 req-359824e7-3740-4410-8d0f-2afc05a00c30 service nova] Lock "5c9b1120-84ad-48d5-8cd4-0cf387963066-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.527151] env[62522]: DEBUG nova.compute.manager [req-f2a9566a-2227-4c1d-89e9-812afef57ff7 req-359824e7-3740-4410-8d0f-2afc05a00c30 service nova] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] No waiting events found dispatching network-vif-plugged-78283962-9062-464a-b1f4-a2319257559b {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1028.527435] env[62522]: WARNING nova.compute.manager [req-f2a9566a-2227-4c1d-89e9-812afef57ff7 req-359824e7-3740-4410-8d0f-2afc05a00c30 service nova] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Received unexpected event network-vif-plugged-78283962-9062-464a-b1f4-a2319257559b for instance with vm_state building and task_state spawning. [ 1028.531082] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52269d3f-3fd5-1efb-d3fa-5c768ba3290f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.600019] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415967, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.692168] env[62522]: DEBUG nova.network.neutron [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Successfully updated port: 78283962-9062-464a-b1f4-a2319257559b {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1028.768917] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquiring lock "921c14c9-27fa-4eda-9831-6263ad0d6c57" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.769370] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "921c14c9-27fa-4eda-9831-6263ad0d6c57" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.769624] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquiring lock "921c14c9-27fa-4eda-9831-6263ad0d6c57-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.769819] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "921c14c9-27fa-4eda-9831-6263ad0d6c57-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.769991] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "921c14c9-27fa-4eda-9831-6263ad0d6c57-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.772393] env[62522]: INFO nova.compute.manager [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Terminating instance [ 1028.875415] env[62522]: DEBUG oslo_concurrency.lockutils 
[None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Releasing lock "refresh_cache-04a9d357-d094-487b-8f09-2f7e0c35f0d7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.036238] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52269d3f-3fd5-1efb-d3fa-5c768ba3290f, 'name': SearchDatastore_Task, 'duration_secs': 0.03984} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.039952] env[62522]: DEBUG oslo_concurrency.lockutils [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.039952] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc/3b2cd0b6-0c7a-411c-a7f5-64835f2179dc.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1029.042803] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5941c58-6116-45df-bb11-d84078064a16 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.056809] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1029.056809] env[62522]: value = "task-2415968" [ 1029.056809] env[62522]: _type = "Task" [ 1029.056809] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.073895] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415968, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.100644] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415967, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.201206] env[62522]: DEBUG oslo_concurrency.lockutils [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquiring lock "refresh_cache-5c9b1120-84ad-48d5-8cd4-0cf387963066" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1029.203395] env[62522]: DEBUG oslo_concurrency.lockutils [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquired lock "refresh_cache-5c9b1120-84ad-48d5-8cd4-0cf387963066" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.203395] env[62522]: DEBUG nova.network.neutron [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1029.276597] env[62522]: DEBUG nova.compute.manager [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1029.276826] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1029.278980] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f3c413d-2234-4e37-9702-023e0a448004 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.288732] env[62522]: DEBUG oslo_concurrency.lockutils [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "e1225c6f-9025-41ff-94fa-a55af49aeed2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.288990] env[62522]: DEBUG oslo_concurrency.lockutils [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "e1225c6f-9025-41ff-94fa-a55af49aeed2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.289241] env[62522]: DEBUG oslo_concurrency.lockutils [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "e1225c6f-9025-41ff-94fa-a55af49aeed2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} 
[ 1029.289431] env[62522]: DEBUG oslo_concurrency.lockutils [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "e1225c6f-9025-41ff-94fa-a55af49aeed2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.289738] env[62522]: DEBUG oslo_concurrency.lockutils [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "e1225c6f-9025-41ff-94fa-a55af49aeed2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.291490] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1029.292011] env[62522]: INFO nova.compute.manager [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Terminating instance [ 1029.293566] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-81518dff-50de-4bc4-95c1-2f79c5b60fed {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.303569] env[62522]: DEBUG oslo_vmware.api [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1029.303569] env[62522]: value = "task-2415969" [ 1029.303569] env[62522]: _type = "Task" [ 1029.303569] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.316985] env[62522]: DEBUG oslo_vmware.api [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415969, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.372895] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1029.374108] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-254d3ca9-b6e6-44ff-8ec4-295a43a481f1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.385298] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1029.385617] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c8c41c5-f28f-4596-b35d-75444fb8709b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.485468] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1029.485847] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1029.486169] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Deleting the datastore file [datastore1] 04a9d357-d094-487b-8f09-2f7e0c35f0d7 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1029.486942] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b696b6ea-4f22-4c00-b235-548c07401c37 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.495839] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 1029.495839] env[62522]: value = "task-2415971" [ 1029.495839] env[62522]: _type = "Task" [ 1029.495839] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.506465] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415971, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.550943] env[62522]: INFO nova.compute.manager [None req-ae51399e-03de-4a12-921d-27394b2dc013 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Unrescuing [ 1029.550943] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae51399e-03de-4a12-921d-27394b2dc013 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "refresh_cache-bf44e269-0297-473e-b6ce-04a40d0ec1b4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1029.550943] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae51399e-03de-4a12-921d-27394b2dc013 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquired lock "refresh_cache-bf44e269-0297-473e-b6ce-04a40d0ec1b4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.550943] env[62522]: DEBUG nova.network.neutron [None req-ae51399e-03de-4a12-921d-27394b2dc013 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1029.573924] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415968, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.600970] env[62522]: DEBUG oslo_vmware.api [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2415967, 'name': PowerOnVM_Task, 'duration_secs': 1.042626} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.601345] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1029.601568] env[62522]: INFO nova.compute.manager [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Took 8.84 seconds to spawn the instance on the hypervisor. 
[ 1029.601758] env[62522]: DEBUG nova.compute.manager [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1029.602629] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d111362d-ea09-4d0e-909b-a267432333e7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.786484] env[62522]: DEBUG nova.network.neutron [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1029.797793] env[62522]: DEBUG nova.compute.manager [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1029.797939] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1029.798830] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c30106f-33bd-4e6c-bfb4-4aba92fc1f51 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.809938] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1029.810830] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0aca9250-a7f6-4633-a922-576def39234e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.822018] env[62522]: DEBUG oslo_vmware.api [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415969, 'name': PowerOffVM_Task, 'duration_secs': 0.312412} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.822018] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1029.822018] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1029.822018] env[62522]: DEBUG oslo_vmware.api [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1029.822018] env[62522]: value = "task-2415972" [ 1029.822018] env[62522]: _type = "Task" [ 1029.822018] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.825045] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22544aa1-4dff-4831-b405-0da45e58a581 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.835422] env[62522]: DEBUG oslo_vmware.api [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415972, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.922170] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1029.922433] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1029.922617] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Deleting the datastore file [datastore1] 921c14c9-27fa-4eda-9831-6263ad0d6c57 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1029.922891] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e594addf-5073-46c2-a165-23245b55923b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.931554] env[62522]: DEBUG oslo_vmware.api [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1029.931554] env[62522]: value = "task-2415974" [ 1029.931554] env[62522]: _type = "Task" [ 1029.931554] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.948273] env[62522]: DEBUG oslo_vmware.api [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415974, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.012140] env[62522]: DEBUG oslo_vmware.api [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2415971, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.375222} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.012140] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1030.012140] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1030.012140] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1030.040621] env[62522]: DEBUG nova.network.neutron [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Updating instance_info_cache with network_info: [{"id": "78283962-9062-464a-b1f4-a2319257559b", "address": "fa:16:3e:6a:27:1b", "network": {"id": "4b04f6e1-0714-469b-9941-be6f5b6128d0", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-292476225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e1ca5dfb8f2d4b2e932679e017fe8b3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78283962-90", "ovs_interfaceid": "78283962-9062-464a-b1f4-a2319257559b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.046208] env[62522]: INFO nova.scheduler.client.report [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Deleted allocations for instance 04a9d357-d094-487b-8f09-2f7e0c35f0d7 [ 1030.072767] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415968, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.599757} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.073719] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc/3b2cd0b6-0c7a-411c-a7f5-64835f2179dc.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1030.073719] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1030.073966] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-57ab73c6-4e50-489a-9101-647a42c3aa25 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.083075] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1030.083075] env[62522]: value = "task-2415975" [ 1030.083075] env[62522]: _type = "Task" [ 1030.083075] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.098323] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415975, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.108685] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "1a5a235a-477f-4da5-b5c1-ee057211cce8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.108969] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "1a5a235a-477f-4da5-b5c1-ee057211cce8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.128726] env[62522]: INFO nova.compute.manager [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Took 18.83 seconds to build instance. 
[ 1030.306599] env[62522]: DEBUG nova.network.neutron [None req-ae51399e-03de-4a12-921d-27394b2dc013 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Updating instance_info_cache with network_info: [{"id": "36fe2fd3-3447-4032-8c02-5be9712b769d", "address": "fa:16:3e:2e:5d:25", "network": {"id": "5f1d73d1-ff9e-4081-87cf-8df6294f67c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-892212702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "962664c996f24cf9ae192f79fae18ca4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "419a5b3f-4c6f-4168-9def-746b4d8c5c24", "external-id": "nsx-vlan-transportzone-656", "segmentation_id": 656, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36fe2fd3-34", "ovs_interfaceid": "36fe2fd3-3447-4032-8c02-5be9712b769d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.337687] env[62522]: DEBUG oslo_vmware.api [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415972, 'name': PowerOffVM_Task, 'duration_secs': 0.219601} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.338139] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1030.338440] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1030.338813] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc5b78c1-1555-4bf7-b19e-01175cc804da {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.442052] env[62522]: DEBUG oslo_vmware.api [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415974, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.40943} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.443396] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1030.443653] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1030.443886] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1030.444117] env[62522]: INFO nova.compute.manager [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1030.444422] env[62522]: DEBUG oslo.service.loopingcall [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1030.444680] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1030.444904] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1030.445164] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Deleting the datastore file [datastore2] e1225c6f-9025-41ff-94fa-a55af49aeed2 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1030.446487] env[62522]: DEBUG nova.compute.manager [-] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1030.446487] env[62522]: DEBUG nova.network.neutron [-] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1030.447570] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d5d32e98-1521-43af-b199-2a77d67a5d67 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.458421] env[62522]: DEBUG oslo_vmware.api [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1030.458421] env[62522]: value = "task-2415977" [ 1030.458421] env[62522]: _type = "Task" [ 1030.458421] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.467980] env[62522]: DEBUG oslo_vmware.api [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415977, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.546500] env[62522]: DEBUG oslo_concurrency.lockutils [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Releasing lock "refresh_cache-5c9b1120-84ad-48d5-8cd4-0cf387963066" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1030.546931] env[62522]: DEBUG nova.compute.manager [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Instance network_info: |[{"id": "78283962-9062-464a-b1f4-a2319257559b", "address": "fa:16:3e:6a:27:1b", "network": {"id": "4b04f6e1-0714-469b-9941-be6f5b6128d0", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-292476225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e1ca5dfb8f2d4b2e932679e017fe8b3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78283962-90", "ovs_interfaceid": "78283962-9062-464a-b1f4-a2319257559b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1030.547465] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6a:27:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8459aaf-d6a8-46fb-ad14-464ac3104695', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '78283962-9062-464a-b1f4-a2319257559b', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1030.555686] env[62522]: DEBUG oslo.service.loopingcall [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1030.556704] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.556973] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.557251] env[62522]: DEBUG nova.objects.instance [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lazy-loading 'resources' on Instance uuid 04a9d357-d094-487b-8f09-2f7e0c35f0d7 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1030.558259] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1030.558503] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9bcb1c50-bf74-41cd-b089-836fcc49e64e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.576851] env[62522]: DEBUG nova.objects.instance [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lazy-loading 'numa_topology' on Instance uuid 04a9d357-d094-487b-8f09-2f7e0c35f0d7 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1030.579040] env[62522]: DEBUG nova.compute.manager [req-be8fb051-c855-4752-8f80-1cf90bceebce req-9f54e6f8-2c1f-4f18-9b56-9b43572bd3f1 service nova] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Received event network-changed-78283962-9062-464a-b1f4-a2319257559b {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1030.579230] env[62522]: DEBUG nova.compute.manager [req-be8fb051-c855-4752-8f80-1cf90bceebce req-9f54e6f8-2c1f-4f18-9b56-9b43572bd3f1 service nova] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Refreshing instance network info cache due to event network-changed-78283962-9062-464a-b1f4-a2319257559b. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1030.579439] env[62522]: DEBUG oslo_concurrency.lockutils [req-be8fb051-c855-4752-8f80-1cf90bceebce req-9f54e6f8-2c1f-4f18-9b56-9b43572bd3f1 service nova] Acquiring lock "refresh_cache-5c9b1120-84ad-48d5-8cd4-0cf387963066" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1030.579583] env[62522]: DEBUG oslo_concurrency.lockutils [req-be8fb051-c855-4752-8f80-1cf90bceebce req-9f54e6f8-2c1f-4f18-9b56-9b43572bd3f1 service nova] Acquired lock "refresh_cache-5c9b1120-84ad-48d5-8cd4-0cf387963066" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.579744] env[62522]: DEBUG nova.network.neutron [req-be8fb051-c855-4752-8f80-1cf90bceebce req-9f54e6f8-2c1f-4f18-9b56-9b43572bd3f1 service nova] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Refreshing network info cache for port 78283962-9062-464a-b1f4-a2319257559b {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1030.590816] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1030.590816] env[62522]: value = "task-2415978" [ 1030.590816] env[62522]: _type = "Task" [ 1030.590816] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.595383] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415975, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.181772} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.599901] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1030.599901] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa34bf2-9ea8-454d-bca9-de5777d581ef {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.608063] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415978, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.617738] env[62522]: DEBUG nova.compute.manager [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1030.628764] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc/3b2cd0b6-0c7a-411c-a7f5-64835f2179dc.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1030.629661] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-00308d3d-8e4a-4191-8df5-cb1422631116 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.644518] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9e454e10-39fc-43c9-af90-b39ad771dda3 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "cabe40a0-8bd0-4d77-b949-298bd194fa42" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.351s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.651852] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1030.651852] env[62522]: value = "task-2415979" [ 1030.651852] env[62522]: _type = "Task" [ 1030.651852] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.660817] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415979, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.809626] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae51399e-03de-4a12-921d-27394b2dc013 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Releasing lock "refresh_cache-bf44e269-0297-473e-b6ce-04a40d0ec1b4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1030.810616] env[62522]: DEBUG nova.objects.instance [None req-ae51399e-03de-4a12-921d-27394b2dc013 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lazy-loading 'flavor' on Instance uuid bf44e269-0297-473e-b6ce-04a40d0ec1b4 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1030.968999] env[62522]: DEBUG oslo_vmware.api [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2415977, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.284806} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.969320] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1030.969462] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1030.969641] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1030.969814] env[62522]: INFO nova.compute.manager [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1030.970076] env[62522]: DEBUG oslo.service.loopingcall [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1030.970278] env[62522]: DEBUG nova.compute.manager [-] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1030.970404] env[62522]: DEBUG nova.network.neutron [-] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1031.084213] env[62522]: DEBUG nova.objects.base [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Object Instance<04a9d357-d094-487b-8f09-2f7e0c35f0d7> lazy-loaded attributes: resources,numa_topology {{(pid=62522) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1031.106565] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415978, 'name': CreateVM_Task} progress is 25%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.161968] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.170545] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415979, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.300849] env[62522]: DEBUG nova.network.neutron [-] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.316461] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc5ac421-b486-480f-93c6-d2d2a749cb8c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.347156] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae51399e-03de-4a12-921d-27394b2dc013 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1031.351134] env[62522]: DEBUG nova.network.neutron [req-be8fb051-c855-4752-8f80-1cf90bceebce req-9f54e6f8-2c1f-4f18-9b56-9b43572bd3f1 service nova] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Updated VIF entry in instance network info cache for port 78283962-9062-464a-b1f4-a2319257559b. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1031.351529] env[62522]: DEBUG nova.network.neutron [req-be8fb051-c855-4752-8f80-1cf90bceebce req-9f54e6f8-2c1f-4f18-9b56-9b43572bd3f1 service nova] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Updating instance_info_cache with network_info: [{"id": "78283962-9062-464a-b1f4-a2319257559b", "address": "fa:16:3e:6a:27:1b", "network": {"id": "4b04f6e1-0714-469b-9941-be6f5b6128d0", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-292476225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e1ca5dfb8f2d4b2e932679e017fe8b3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78283962-90", "ovs_interfaceid": "78283962-9062-464a-b1f4-a2319257559b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.352979] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce1e3947-e991-4d01-9c38-7d38c2f2b642 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.367461] env[62522]: DEBUG oslo_vmware.api [None req-ae51399e-03de-4a12-921d-27394b2dc013 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 1031.367461] env[62522]: value = "task-2415980" [ 1031.367461] env[62522]: _type = "Task" [ 1031.367461] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.376760] env[62522]: DEBUG oslo_vmware.api [None req-ae51399e-03de-4a12-921d-27394b2dc013 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415980, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.478218] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19796427-a5d6-4e1b-a87c-72d03d76337e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.488423] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcacc335-6e35-4d44-a9d9-a0bcf88c24c8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.538106] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4ea722f-adfd-4812-bc56-2e0ecdc8a113 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.551442] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4811dfb-da73-4f65-849e-7296b38b0e2e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.567598] env[62522]: DEBUG nova.compute.provider_tree [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.606428] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415978, 'name': CreateVM_Task} progress is 25%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.668046] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415979, 'name': ReconfigVM_Task, 'duration_secs': 0.893575} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.668046] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Reconfigured VM instance instance-00000057 to attach disk [datastore2] 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc/3b2cd0b6-0c7a-411c-a7f5-64835f2179dc.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1031.668046] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b2bb585f-bd1c-436b-859e-1b4145139fec {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.676420] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1031.676420] env[62522]: value = "task-2415981" [ 1031.676420] env[62522]: _type = "Task" [ 1031.676420] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.687384] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415981, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.805675] env[62522]: INFO nova.compute.manager [-] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Took 1.36 seconds to deallocate network for instance. 
[ 1031.856888] env[62522]: DEBUG oslo_concurrency.lockutils [req-be8fb051-c855-4752-8f80-1cf90bceebce req-9f54e6f8-2c1f-4f18-9b56-9b43572bd3f1 service nova] Releasing lock "refresh_cache-5c9b1120-84ad-48d5-8cd4-0cf387963066" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1031.857185] env[62522]: DEBUG nova.compute.manager [req-be8fb051-c855-4752-8f80-1cf90bceebce req-9f54e6f8-2c1f-4f18-9b56-9b43572bd3f1 service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Received event network-vif-unplugged-7e36641e-fc4a-4223-ab07-33dc49821168 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1031.857379] env[62522]: DEBUG oslo_concurrency.lockutils [req-be8fb051-c855-4752-8f80-1cf90bceebce req-9f54e6f8-2c1f-4f18-9b56-9b43572bd3f1 service nova] Acquiring lock "04a9d357-d094-487b-8f09-2f7e0c35f0d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.857574] env[62522]: DEBUG oslo_concurrency.lockutils [req-be8fb051-c855-4752-8f80-1cf90bceebce req-9f54e6f8-2c1f-4f18-9b56-9b43572bd3f1 service nova] Lock "04a9d357-d094-487b-8f09-2f7e0c35f0d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.857727] env[62522]: DEBUG oslo_concurrency.lockutils [req-be8fb051-c855-4752-8f80-1cf90bceebce req-9f54e6f8-2c1f-4f18-9b56-9b43572bd3f1 service nova] Lock "04a9d357-d094-487b-8f09-2f7e0c35f0d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.861022] env[62522]: DEBUG nova.compute.manager [req-be8fb051-c855-4752-8f80-1cf90bceebce req-9f54e6f8-2c1f-4f18-9b56-9b43572bd3f1 service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] No waiting events found dispatching network-vif-unplugged-7e36641e-fc4a-4223-ab07-33dc49821168 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1031.861022] env[62522]: WARNING nova.compute.manager [req-be8fb051-c855-4752-8f80-1cf90bceebce req-9f54e6f8-2c1f-4f18-9b56-9b43572bd3f1 service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Received unexpected event network-vif-unplugged-7e36641e-fc4a-4223-ab07-33dc49821168 for instance with vm_state shelved_offloaded and task_state None. [ 1031.861022] env[62522]: DEBUG nova.compute.manager [req-be8fb051-c855-4752-8f80-1cf90bceebce req-9f54e6f8-2c1f-4f18-9b56-9b43572bd3f1 service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Received event network-changed-7e36641e-fc4a-4223-ab07-33dc49821168 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1031.861022] env[62522]: DEBUG nova.compute.manager [req-be8fb051-c855-4752-8f80-1cf90bceebce req-9f54e6f8-2c1f-4f18-9b56-9b43572bd3f1 service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Refreshing instance network info cache due to event network-changed-7e36641e-fc4a-4223-ab07-33dc49821168. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1031.861022] env[62522]: DEBUG oslo_concurrency.lockutils [req-be8fb051-c855-4752-8f80-1cf90bceebce req-9f54e6f8-2c1f-4f18-9b56-9b43572bd3f1 service nova] Acquiring lock "refresh_cache-04a9d357-d094-487b-8f09-2f7e0c35f0d7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1031.861022] env[62522]: DEBUG oslo_concurrency.lockutils [req-be8fb051-c855-4752-8f80-1cf90bceebce req-9f54e6f8-2c1f-4f18-9b56-9b43572bd3f1 service nova] Acquired lock "refresh_cache-04a9d357-d094-487b-8f09-2f7e0c35f0d7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.861022] env[62522]: DEBUG nova.network.neutron [req-be8fb051-c855-4752-8f80-1cf90bceebce req-9f54e6f8-2c1f-4f18-9b56-9b43572bd3f1 service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Refreshing network info cache for port 7e36641e-fc4a-4223-ab07-33dc49821168 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1031.882457] env[62522]: DEBUG oslo_vmware.api [None req-ae51399e-03de-4a12-921d-27394b2dc013 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415980, 'name': PowerOffVM_Task, 'duration_secs': 0.278526} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.882740] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae51399e-03de-4a12-921d-27394b2dc013 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1031.890683] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae51399e-03de-4a12-921d-27394b2dc013 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Reconfiguring VM instance instance-00000033 to detach disk 2002 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1031.894090] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8c475a0-0d69-42ab-80e3-ee92d602b9a5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.912300] env[62522]: DEBUG oslo_vmware.api [None req-ae51399e-03de-4a12-921d-27394b2dc013 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 1031.912300] env[62522]: value = "task-2415982" [ 1031.912300] env[62522]: _type = "Task" [ 1031.912300] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.922447] env[62522]: DEBUG oslo_vmware.api [None req-ae51399e-03de-4a12-921d-27394b2dc013 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415982, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.947367] env[62522]: DEBUG nova.network.neutron [-] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.073345] env[62522]: DEBUG nova.scheduler.client.report [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1032.106567] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415978, 'name': CreateVM_Task} progress is 25%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.190120] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415981, 'name': Rename_Task, 'duration_secs': 0.20626} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.190971] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1032.191279] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6c5f20da-a033-4709-b0e5-aa3894ea9a01 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.201996] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1032.201996] env[62522]: value = "task-2415983" [ 1032.201996] env[62522]: _type = "Task" [ 1032.201996] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.212746] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415983, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.312094] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.423631] env[62522]: DEBUG oslo_vmware.api [None req-ae51399e-03de-4a12-921d-27394b2dc013 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415982, 'name': ReconfigVM_Task, 'duration_secs': 0.253167} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.423955] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae51399e-03de-4a12-921d-27394b2dc013 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Reconfigured VM instance instance-00000033 to detach disk 2002 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1032.424155] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae51399e-03de-4a12-921d-27394b2dc013 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1032.424420] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-90e970cb-311b-49c9-9a3b-a09ec66b7150 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.433904] env[62522]: DEBUG oslo_vmware.api [None req-ae51399e-03de-4a12-921d-27394b2dc013 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 1032.433904] env[62522]: value = "task-2415984" [ 1032.433904] env[62522]: _type = "Task" [ 1032.433904] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.445033] env[62522]: DEBUG oslo_vmware.api [None req-ae51399e-03de-4a12-921d-27394b2dc013 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415984, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.450875] env[62522]: INFO nova.compute.manager [-] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Took 1.48 seconds to deallocate network for instance. 
[ 1032.576941] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.020s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.583150] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.418s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.587020] env[62522]: INFO nova.compute.claims [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1032.604718] env[62522]: DEBUG nova.compute.manager [req-8b2bd8d5-c49e-47c2-b6e2-2111f9c4e1cb req-d31b9045-4282-4b50-aaa4-12ac37ae0b67 service nova] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Received event network-vif-deleted-220c0c0d-f275-4f95-b1da-4d8f576166c8 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1032.604977] env[62522]: DEBUG nova.compute.manager [req-8b2bd8d5-c49e-47c2-b6e2-2111f9c4e1cb req-d31b9045-4282-4b50-aaa4-12ac37ae0b67 service nova] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Received event network-vif-deleted-5ffc472e-5334-485a-9155-fed81971c096 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1032.611519] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415978, 'name': CreateVM_Task, 'duration_secs': 1.886479} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.614379] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1032.615382] env[62522]: DEBUG oslo_concurrency.lockutils [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1032.615634] env[62522]: DEBUG oslo_concurrency.lockutils [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.615988] env[62522]: DEBUG oslo_concurrency.lockutils [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1032.616602] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b360d36a-b47a-435c-9c87-f63229cedaab {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.623323] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1032.623323] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52db1e18-58a1-e65e-6cea-4cdb4f3525a6" [ 1032.623323] env[62522]: _type = "Task" [ 1032.623323] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.633951] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52db1e18-58a1-e65e-6cea-4cdb4f3525a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.685823] env[62522]: DEBUG nova.compute.manager [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Stashing vm_state: active {{(pid=62522) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1032.716756] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415983, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.769665] env[62522]: DEBUG nova.network.neutron [req-be8fb051-c855-4752-8f80-1cf90bceebce req-9f54e6f8-2c1f-4f18-9b56-9b43572bd3f1 service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Updated VIF entry in instance network info cache for port 7e36641e-fc4a-4223-ab07-33dc49821168. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1032.770136] env[62522]: DEBUG nova.network.neutron [req-be8fb051-c855-4752-8f80-1cf90bceebce req-9f54e6f8-2c1f-4f18-9b56-9b43572bd3f1 service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Updating instance_info_cache with network_info: [{"id": "7e36641e-fc4a-4223-ab07-33dc49821168", "address": "fa:16:3e:f1:bf:49", "network": {"id": "b837f0fb-c2e1-46dd-93b2-62d6c4352316", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1813744063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed393a0454b643eea75c203d1dfd592c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap7e36641e-fc", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.948748] env[62522]: DEBUG oslo_vmware.api [None req-ae51399e-03de-4a12-921d-27394b2dc013 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2415984, 'name': PowerOnVM_Task, 'duration_secs': 0.407} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.950078] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae51399e-03de-4a12-921d-27394b2dc013 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1032.950078] env[62522]: DEBUG nova.compute.manager [None req-ae51399e-03de-4a12-921d-27394b2dc013 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1032.950534] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c231e2-1d56-4c32-8abc-fb34cdd69aff {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.958733] env[62522]: DEBUG oslo_concurrency.lockutils [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1033.097048] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3d2e2697-a951-48ef-9062-df41650b87d3 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "04a9d357-d094-487b-8f09-2f7e0c35f0d7" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 27.436s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.136264] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52db1e18-58a1-e65e-6cea-4cdb4f3525a6, 'name': SearchDatastore_Task, 'duration_secs': 0.010843} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.136565] env[62522]: DEBUG oslo_concurrency.lockutils [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1033.136800] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1033.137071] env[62522]: DEBUG oslo_concurrency.lockutils [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1033.137229] env[62522]: DEBUG oslo_concurrency.lockutils [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.137485] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1033.137782] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-81b4981e-fb2a-4bfd-8ab0-b46e1b5395e8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.142598] env[62522]: DEBUG oslo_concurrency.lockutils [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquiring lock "04a9d357-d094-487b-8f09-2f7e0c35f0d7" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1033.142804] env[62522]: DEBUG oslo_concurrency.lockutils [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "04a9d357-d094-487b-8f09-2f7e0c35f0d7" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1033.142977] env[62522]: INFO nova.compute.manager [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Unshelving [ 
1033.153018] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1033.153323] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1033.154434] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd60925f-8e74-4aa4-b0b1-5aaa9c42138f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.161668] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1033.161668] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b74b35-7300-a2fa-db45-d92cd15bfbc8" [ 1033.161668] env[62522]: _type = "Task" [ 1033.161668] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.170332] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b74b35-7300-a2fa-db45-d92cd15bfbc8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.207816] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1033.211212] env[62522]: DEBUG oslo_vmware.api [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415983, 'name': PowerOnVM_Task, 'duration_secs': 0.810929} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.211520] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1033.211721] env[62522]: INFO nova.compute.manager [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Took 9.87 seconds to spawn the instance on the hypervisor. 
[ 1033.211894] env[62522]: DEBUG nova.compute.manager [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1033.212695] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a41999-c525-4ae9-bf15-61983ed9df6e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.273549] env[62522]: DEBUG oslo_concurrency.lockutils [req-be8fb051-c855-4752-8f80-1cf90bceebce req-9f54e6f8-2c1f-4f18-9b56-9b43572bd3f1 service nova] Releasing lock "refresh_cache-04a9d357-d094-487b-8f09-2f7e0c35f0d7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1033.673542] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b74b35-7300-a2fa-db45-d92cd15bfbc8, 'name': SearchDatastore_Task, 'duration_secs': 0.042728} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.677507] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aafcd41a-c6cc-410d-8945-4ca8e09779fa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.684884] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1033.684884] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520dca07-f922-b875-8d18-413cf7887a75" [ 1033.684884] env[62522]: _type = "Task" [ 1033.684884] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.696202] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520dca07-f922-b875-8d18-413cf7887a75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.733185] env[62522]: INFO nova.compute.manager [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Took 21.34 seconds to build instance. 
[ 1033.952058] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db67dc45-dbbc-4efb-b02a-72d7ef338a1d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.964247] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3feabb9d-007f-4e66-907e-1704b5138f0e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.999961] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-287af503-f0e8-437b-8fb9-da9177530f17 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.011476] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b518be-6797-4ca1-b8a6-89fb55ec0763 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.033863] env[62522]: DEBUG nova.compute.provider_tree [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1034.178009] env[62522]: DEBUG oslo_concurrency.lockutils [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.198305] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1034.198635] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520dca07-f922-b875-8d18-413cf7887a75, 'name': SearchDatastore_Task, 'duration_secs': 0.012083} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.198829] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1034.200128] env[62522]: DEBUG oslo_concurrency.lockutils [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.200406] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 5c9b1120-84ad-48d5-8cd4-0cf387963066/5c9b1120-84ad-48d5-8cd4-0cf387963066.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1034.200840] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d3fce0f5-ea18-4636-b909-5b65f8464799 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.212677] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1034.212677] env[62522]: value = "task-2415985" [ 1034.212677] env[62522]: _type = "Task" [ 1034.212677] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.231755] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415985, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.234282] env[62522]: DEBUG oslo_concurrency.lockutils [None req-576ab7ea-9028-4813-9684-0289e9a56057 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "3b2cd0b6-0c7a-411c-a7f5-64835f2179dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.845s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.540725] env[62522]: DEBUG nova.scheduler.client.report [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1034.646914] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "3b2cd0b6-0c7a-411c-a7f5-64835f2179dc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.647244] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "3b2cd0b6-0c7a-411c-a7f5-64835f2179dc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.648032] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "3b2cd0b6-0c7a-411c-a7f5-64835f2179dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.648032] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "3b2cd0b6-0c7a-411c-a7f5-64835f2179dc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.648032] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "3b2cd0b6-0c7a-411c-a7f5-64835f2179dc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.650289] 
env[62522]: INFO nova.compute.manager [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Terminating instance [ 1034.659566] env[62522]: DEBUG nova.compute.manager [req-bdbc0ccf-2f3c-4735-88c3-a8392fc33d2f req-d98b38a7-3f64-421e-ab66-5220b6775188 service nova] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Received event network-changed-36fe2fd3-3447-4032-8c02-5be9712b769d {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1034.659779] env[62522]: DEBUG nova.compute.manager [req-bdbc0ccf-2f3c-4735-88c3-a8392fc33d2f req-d98b38a7-3f64-421e-ab66-5220b6775188 service nova] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Refreshing instance network info cache due to event network-changed-36fe2fd3-3447-4032-8c02-5be9712b769d. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1034.660048] env[62522]: DEBUG oslo_concurrency.lockutils [req-bdbc0ccf-2f3c-4735-88c3-a8392fc33d2f req-d98b38a7-3f64-421e-ab66-5220b6775188 service nova] Acquiring lock "refresh_cache-bf44e269-0297-473e-b6ce-04a40d0ec1b4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.660167] env[62522]: DEBUG oslo_concurrency.lockutils [req-bdbc0ccf-2f3c-4735-88c3-a8392fc33d2f req-d98b38a7-3f64-421e-ab66-5220b6775188 service nova] Acquired lock "refresh_cache-bf44e269-0297-473e-b6ce-04a40d0ec1b4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.660351] env[62522]: DEBUG nova.network.neutron [req-bdbc0ccf-2f3c-4735-88c3-a8392fc33d2f req-d98b38a7-3f64-421e-ab66-5220b6775188 service nova] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Refreshing network info cache for port 36fe2fd3-3447-4032-8c02-5be9712b769d {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1034.709636] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1034.709636] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Starting heal instance info cache {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1034.731218] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415985, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.046351] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.466s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.047014] env[62522]: DEBUG nova.compute.manager [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1035.050679] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.739s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.050990] env[62522]: DEBUG nova.objects.instance [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lazy-loading 'resources' on Instance uuid 921c14c9-27fa-4eda-9831-6263ad0d6c57 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1035.155044] env[62522]: DEBUG nova.compute.manager [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1035.155044] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1035.155767] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe4b4ec-f840-48fd-8131-cd66de932c5a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.166808] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1035.167033] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9242346a-1059-4803-80de-1c60a62fda50 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.176208] env[62522]: DEBUG oslo_vmware.api [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1035.176208] env[62522]: value = "task-2415986" [ 1035.176208] env[62522]: _type = "Task" [ 1035.176208] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.185994] env[62522]: DEBUG oslo_vmware.api [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415986, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.224981] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Acquiring lock "a10c4dee-4490-445a-bea2-9f8ef5425d15" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.227674] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Lock "a10c4dee-4490-445a-bea2-9f8ef5425d15" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.227674] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Acquiring lock "a10c4dee-4490-445a-bea2-9f8ef5425d15-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.227674] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Lock "a10c4dee-4490-445a-bea2-9f8ef5425d15-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.227674] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Lock "a10c4dee-4490-445a-bea2-9f8ef5425d15-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.227674] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415985, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.766401} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.228063] env[62522]: INFO nova.compute.manager [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Terminating instance [ 1035.229960] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 5c9b1120-84ad-48d5-8cd4-0cf387963066/5c9b1120-84ad-48d5-8cd4-0cf387963066.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1035.230191] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1035.234805] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b80579d4-6422-4dc3-af39-5a5a1f082eb5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.242572] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1035.242572] env[62522]: value = "task-2415987" [ 1035.242572] env[62522]: _type = "Task" [ 1035.242572] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.254179] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415987, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.514037] env[62522]: DEBUG nova.network.neutron [req-bdbc0ccf-2f3c-4735-88c3-a8392fc33d2f req-d98b38a7-3f64-421e-ab66-5220b6775188 service nova] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Updated VIF entry in instance network info cache for port 36fe2fd3-3447-4032-8c02-5be9712b769d. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1035.514159] env[62522]: DEBUG nova.network.neutron [req-bdbc0ccf-2f3c-4735-88c3-a8392fc33d2f req-d98b38a7-3f64-421e-ab66-5220b6775188 service nova] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Updating instance_info_cache with network_info: [{"id": "36fe2fd3-3447-4032-8c02-5be9712b769d", "address": "fa:16:3e:2e:5d:25", "network": {"id": "5f1d73d1-ff9e-4081-87cf-8df6294f67c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-892212702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "962664c996f24cf9ae192f79fae18ca4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "419a5b3f-4c6f-4168-9def-746b4d8c5c24", "external-id": "nsx-vlan-transportzone-656", "segmentation_id": 656, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36fe2fd3-34", "ovs_interfaceid": "36fe2fd3-3447-4032-8c02-5be9712b769d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.554223] env[62522]: DEBUG nova.compute.utils [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1035.555778] env[62522]: DEBUG nova.compute.manager [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1035.555956] env[62522]: DEBUG nova.network.neutron [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1035.625313] env[62522]: DEBUG nova.policy [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3549d85b612044969af8fda179d169ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61314d3f0b9e4c368312e714a953e549', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1035.686673] env[62522]: DEBUG oslo_vmware.api [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415986, 'name': PowerOffVM_Task, 'duration_secs': 0.314127} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.689279] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1035.689459] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1035.689912] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4c2d3945-9923-472a-8e95-ad8aa9a70445 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.734982] env[62522]: DEBUG nova.compute.manager [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1035.735229] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1035.738852] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8178ee83-81a9-4698-81c9-0d3ab955dcc1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.751134] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1035.754207] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-69d683c4-195c-4315-998c-0a66a7e28857 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.761567] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415987, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071171} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.761832] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1035.762721] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d522f754-0f14-4966-8508-5e5e1013bdef {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.769357] env[62522]: DEBUG oslo_vmware.api [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Waiting for the task: (returnval){ [ 1035.769357] env[62522]: value = "task-2415989" [ 1035.769357] env[62522]: _type = "Task" [ 1035.769357] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.791295] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 5c9b1120-84ad-48d5-8cd4-0cf387963066/5c9b1120-84ad-48d5-8cd4-0cf387963066.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1035.795595] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b75532f3-9a11-4f8c-ab45-322e02ae0d66 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.810080] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1035.810332] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1035.810515] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Deleting the datastore file [datastore2] 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1035.814333] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-42d20723-0b52-4e6c-b225-21ead3e10bdc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.816166] env[62522]: DEBUG oslo_vmware.api [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Task: {'id': task-2415989, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.827089] env[62522]: DEBUG oslo_vmware.api [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1035.827089] env[62522]: value = "task-2415990" [ 1035.827089] env[62522]: _type = "Task" [ 1035.827089] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.827413] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1035.827413] env[62522]: value = "task-2415991" [ 1035.827413] env[62522]: _type = "Task" [ 1035.827413] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.842834] env[62522]: DEBUG oslo_vmware.api [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415990, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.843137] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415991, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.934273] env[62522]: DEBUG nova.network.neutron [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Successfully created port: f38ae927-c0d7-4f7c-91ab-2354af588af0 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1035.948968] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48074259-6d5a-4f6e-a1b7-6034631ed60f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.960835] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1491f965-2928-4308-b2e5-a3929b31d970 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.005084] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-992a4f77-78a0-4518-9750-2d4e7f84cd94 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.015644] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed9c525-896b-481c-86d9-00c2ebc23b73 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.022406] env[62522]: DEBUG oslo_concurrency.lockutils [req-bdbc0ccf-2f3c-4735-88c3-a8392fc33d2f req-d98b38a7-3f64-421e-ab66-5220b6775188 service nova] Releasing lock "refresh_cache-bf44e269-0297-473e-b6ce-04a40d0ec1b4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.036112] env[62522]: DEBUG nova.compute.provider_tree [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1036.062050] env[62522]: DEBUG nova.compute.manager [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1036.280413] env[62522]: DEBUG oslo_vmware.api [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Task: {'id': task-2415989, 'name': PowerOffVM_Task, 'duration_secs': 0.342029} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.280815] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1036.280879] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1036.281154] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-910e984b-59a6-498d-aee6-fb2a888d6d6e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.343639] env[62522]: DEBUG oslo_vmware.api [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2415990, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.354791} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.347446] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1036.347696] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1036.347887] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1036.348079] env[62522]: INFO nova.compute.manager [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1036.348527] env[62522]: DEBUG oslo.service.loopingcall [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1036.348758] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415991, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.348984] env[62522]: DEBUG nova.compute.manager [-] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1036.349139] env[62522]: DEBUG nova.network.neutron [-] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1036.353790] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1036.354240] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1036.354509] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Deleting the datastore file [datastore2] a10c4dee-4490-445a-bea2-9f8ef5425d15 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1036.355827] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-14ae38b9-543f-40ed-be9d-d67217c5535e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.363786] env[62522]: DEBUG oslo_vmware.api [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Waiting for the task: (returnval){ [ 1036.363786] env[62522]: value = "task-2415993" [ 1036.363786] env[62522]: _type = "Task" [ 1036.363786] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.373125] env[62522]: DEBUG oslo_vmware.api [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Task: {'id': task-2415993, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.542035] env[62522]: DEBUG nova.scheduler.client.report [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1036.654898] env[62522]: DEBUG nova.compute.manager [req-cd511269-410b-4b9e-b412-04271d120ca3 req-35c04365-1202-45e1-ba16-0782859fa312 service nova] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Received event network-vif-deleted-6adfce51-a4d5-4682-bee9-e6bea918aa38 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1036.655112] env[62522]: INFO nova.compute.manager [req-cd511269-410b-4b9e-b412-04271d120ca3 req-35c04365-1202-45e1-ba16-0782859fa312 service nova] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Neutron deleted interface 6adfce51-a4d5-4682-bee9-e6bea918aa38; detaching it from the instance and deleting it from the info cache [ 1036.655291] env[62522]: DEBUG nova.network.neutron [req-cd511269-410b-4b9e-b412-04271d120ca3 req-35c04365-1202-45e1-ba16-0782859fa312 service nova] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.689578] env[62522]: DEBUG nova.compute.manager [req-84c63550-4811-4b02-b42b-7950b880c72b req-29213b2f-34ce-4ed0-a796-0cafa87736e1 service nova] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Received event network-changed-36fe2fd3-3447-4032-8c02-5be9712b769d {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1036.689747] env[62522]: DEBUG nova.compute.manager [req-84c63550-4811-4b02-b42b-7950b880c72b req-29213b2f-34ce-4ed0-a796-0cafa87736e1 service nova] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Refreshing instance network info cache due to event network-changed-36fe2fd3-3447-4032-8c02-5be9712b769d. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1036.690243] env[62522]: DEBUG oslo_concurrency.lockutils [req-84c63550-4811-4b02-b42b-7950b880c72b req-29213b2f-34ce-4ed0-a796-0cafa87736e1 service nova] Acquiring lock "refresh_cache-bf44e269-0297-473e-b6ce-04a40d0ec1b4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1036.690408] env[62522]: DEBUG oslo_concurrency.lockutils [req-84c63550-4811-4b02-b42b-7950b880c72b req-29213b2f-34ce-4ed0-a796-0cafa87736e1 service nova] Acquired lock "refresh_cache-bf44e269-0297-473e-b6ce-04a40d0ec1b4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.690571] env[62522]: DEBUG nova.network.neutron [req-84c63550-4811-4b02-b42b-7950b880c72b req-29213b2f-34ce-4ed0-a796-0cafa87736e1 service nova] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Refreshing network info cache for port 36fe2fd3-3447-4032-8c02-5be9712b769d {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1036.839080] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415991, 'name': ReconfigVM_Task, 'duration_secs': 0.645412} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.839388] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 5c9b1120-84ad-48d5-8cd4-0cf387963066/5c9b1120-84ad-48d5-8cd4-0cf387963066.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1036.840028] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-453a9e7d-53b9-4132-bc1a-3297eda13990 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.847270] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1036.847270] env[62522]: value = "task-2415994" [ 1036.847270] env[62522]: _type = "Task" [ 1036.847270] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.855613] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415994, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.873476] env[62522]: DEBUG oslo_vmware.api [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Task: {'id': task-2415993, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.365656} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.873680] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1036.873909] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1036.874055] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1036.874237] env[62522]: INFO nova.compute.manager [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1036.874480] env[62522]: DEBUG oslo.service.loopingcall [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1036.874675] env[62522]: DEBUG nova.compute.manager [-] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1036.874801] env[62522]: DEBUG nova.network.neutron [-] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1037.051146] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.054848] env[62522]: DEBUG oslo_concurrency.lockutils [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.096s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.055226] env[62522]: DEBUG nova.objects.instance [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lazy-loading 'resources' on Instance uuid e1225c6f-9025-41ff-94fa-a55af49aeed2 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1037.075500] env[62522]: DEBUG nova.compute.manager [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1037.079734] env[62522]: INFO nova.scheduler.client.report [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Deleted allocations for instance 921c14c9-27fa-4eda-9831-6263ad0d6c57 [ 1037.106349] env[62522]: DEBUG nova.virt.hardware [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1037.106598] env[62522]: DEBUG nova.virt.hardware [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1037.108210] env[62522]: DEBUG nova.virt.hardware [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1037.108210] env[62522]: DEBUG nova.virt.hardware [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1037.108210] env[62522]: DEBUG nova.virt.hardware [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1037.108210] env[62522]: DEBUG nova.virt.hardware [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1037.108210] env[62522]: DEBUG nova.virt.hardware [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1037.108210] env[62522]: DEBUG nova.virt.hardware [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1037.108210] env[62522]: DEBUG nova.virt.hardware [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1037.108210] env[62522]: DEBUG nova.virt.hardware [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1037.108614] env[62522]: DEBUG nova.virt.hardware [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1037.110757] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c17e76-1676-4852-bb1c-5e4e3bc8d87a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.121213] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-764281d7-437f-4966-9fcd-834015d394c0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.127300] env[62522]: DEBUG nova.network.neutron [-] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.160098] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-abe35eed-d304-415c-ac70-e003def2dc21 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.171954] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89b9ed36-6e29-4963-923e-c9ce7dd4ebb9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.212297] env[62522]: DEBUG nova.compute.manager [req-cd511269-410b-4b9e-b412-04271d120ca3 req-35c04365-1202-45e1-ba16-0782859fa312 service nova] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Detach interface failed, port_id=6adfce51-a4d5-4682-bee9-e6bea918aa38, reason: Instance 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1037.358175] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415994, 'name': Rename_Task, 'duration_secs': 0.195102} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.358622] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1037.358701] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-efefa2d1-e27f-4ba1-a9b6-0296711d7916 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.370220] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1037.370220] env[62522]: value = "task-2415995" [ 1037.370220] env[62522]: _type = "Task" [ 1037.370220] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.378617] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415995, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.529101] env[62522]: DEBUG nova.network.neutron [req-84c63550-4811-4b02-b42b-7950b880c72b req-29213b2f-34ce-4ed0-a796-0cafa87736e1 service nova] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Updated VIF entry in instance network info cache for port 36fe2fd3-3447-4032-8c02-5be9712b769d. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1037.529489] env[62522]: DEBUG nova.network.neutron [req-84c63550-4811-4b02-b42b-7950b880c72b req-29213b2f-34ce-4ed0-a796-0cafa87736e1 service nova] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Updating instance_info_cache with network_info: [{"id": "36fe2fd3-3447-4032-8c02-5be9712b769d", "address": "fa:16:3e:2e:5d:25", "network": {"id": "5f1d73d1-ff9e-4081-87cf-8df6294f67c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-892212702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "962664c996f24cf9ae192f79fae18ca4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "419a5b3f-4c6f-4168-9def-746b4d8c5c24", "external-id": "nsx-vlan-transportzone-656", "segmentation_id": 656, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36fe2fd3-34", "ovs_interfaceid": "36fe2fd3-3447-4032-8c02-5be9712b769d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.588890] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cb7d3942-88e0-4ce0-bd40-0de501c46b64 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "921c14c9-27fa-4eda-9831-6263ad0d6c57" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.819s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.640607] env[62522]: INFO nova.compute.manager [-] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Took 1.29 seconds to deallocate network for instance. 
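
The cache update just above writes the full Neutron view of the port into the instance network info cache as one JSON-like blob. Below is a minimal sketch of reading that structure, using only fields that appear in the logged entry (MAC, fixed IP, floating IP, MTU); the helper name and the trimmed sample are illustrative, not Nova code.

```python
import json

def summarize_vif(vif):
    """Return (mac, fixed_ips, floating_ips, mtu) for one cached VIF entry."""
    fixed, floating = [], []
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            fixed.append(ip["address"])
            floating.extend(f["address"] for f in ip.get("floating_ips", []))
    return vif["address"], fixed, floating, vif["network"]["meta"].get("mtu")

# Trimmed-down copy of the cached entry logged above for port 36fe2fd3-...
network_info = json.loads("""
[{"id": "36fe2fd3-3447-4032-8c02-5be9712b769d",
  "address": "fa:16:3e:2e:5d:25",
  "network": {"subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.5",
                                    "floating_ips": [{"address": "10.180.180.200"}]}]}],
              "meta": {"mtu": 8950}}}]
""")

for vif in network_info:
    print(summarize_vif(vif))
# ('fa:16:3e:2e:5d:25', ['192.168.128.5'], ['10.180.180.200'], 8950)
```
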
[ 1037.772299] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "refresh_cache-c181ce48-9fe2-4400-9047-f8b5a7159dd3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1037.772513] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquired lock "refresh_cache-c181ce48-9fe2-4400-9047-f8b5a7159dd3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.772693] env[62522]: DEBUG nova.network.neutron [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Forcefully refreshing network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1037.836707] env[62522]: DEBUG nova.network.neutron [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Successfully updated port: f38ae927-c0d7-4f7c-91ab-2354af588af0 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1037.881362] env[62522]: DEBUG oslo_vmware.api [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2415995, 'name': PowerOnVM_Task, 'duration_secs': 0.474692} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.884085] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1037.884311] env[62522]: INFO nova.compute.manager [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Took 9.98 seconds to spawn the instance on the hypervisor. 
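
The power-on above follows the usual oslo.vmware pattern: `invoke_api` issues the SOAP call ("Invoking VirtualMachine.PowerOnVM_Task"), and `wait_for_task` polls the returned task until it finishes ("progress is 0%" ... "completed successfully"). A minimal sketch of that pattern; the host, credentials and the way `vm_ref` is obtained are placeholders, not values from this deployment.

```python
from oslo_vmware import api

def power_on(session, vm_ref):
    # invoke_api sends PowerOnVM_Task to vCenter; wait_for_task polls the task
    # at the session's poll interval and raises if it ends in an error state.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)

# A session like the one the driver holds; note that construction logs in to
# vCenter immediately (create_session defaults to True).
session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)
vm_ref = ...  # moref of the VM, e.g. from a SearchIndex/PropertyCollector lookup
power_on(session, vm_ref)
```
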
[ 1037.884495] env[62522]: DEBUG nova.compute.manager [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1037.885759] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5a9c3c7-58f3-485e-b67d-026d11e7fd8d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.910072] env[62522]: DEBUG nova.network.neutron [-] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.969314] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7afd9b21-cd0f-4bb7-b273-7cabc8d5eb81 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.980395] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d533e7-5c3c-4c04-b93e-832e428c2c86 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.020549] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b52b93d-f8e2-434b-ae84-7f52c96405e4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.030289] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1258d9aa-441d-49bd-b72c-62b1e704b06b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.040018] env[62522]: DEBUG oslo_concurrency.lockutils [req-84c63550-4811-4b02-b42b-7950b880c72b req-29213b2f-34ce-4ed0-a796-0cafa87736e1 service nova] Releasing lock "refresh_cache-bf44e269-0297-473e-b6ce-04a40d0ec1b4" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1038.046818] env[62522]: DEBUG nova.compute.provider_tree [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1038.150776] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.337556] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "refresh_cache-1a5a235a-477f-4da5-b5c1-ee057211cce8" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1038.337695] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 
tempest-ImagesTestJSON-182949557-project-member] Acquired lock "refresh_cache-1a5a235a-477f-4da5-b5c1-ee057211cce8" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.337867] env[62522]: DEBUG nova.network.neutron [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1038.403878] env[62522]: INFO nova.compute.manager [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Took 22.63 seconds to build instance. [ 1038.415749] env[62522]: INFO nova.compute.manager [-] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Took 1.54 seconds to deallocate network for instance. [ 1038.550539] env[62522]: DEBUG nova.scheduler.client.report [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1038.652471] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3e151e3c-892a-4850-967d-6e1c1d04bbd9 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "interface-fcd0eef6-d059-4495-a982-058b6c9626d1-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.652856] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3e151e3c-892a-4850-967d-6e1c1d04bbd9 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "interface-fcd0eef6-d059-4495-a982-058b6c9626d1-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.653234] env[62522]: DEBUG nova.objects.instance [None req-3e151e3c-892a-4850-967d-6e1c1d04bbd9 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lazy-loading 'flavor' on Instance uuid fcd0eef6-d059-4495-a982-058b6c9626d1 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1038.694699] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquiring lock "4e9436df-c86b-429b-abc2-97f760858055" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.694955] env[62522]: DEBUG 
oslo_concurrency.lockutils [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "4e9436df-c86b-429b-abc2-97f760858055" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.695181] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquiring lock "4e9436df-c86b-429b-abc2-97f760858055-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.695360] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "4e9436df-c86b-429b-abc2-97f760858055-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.695527] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "4e9436df-c86b-429b-abc2-97f760858055-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1038.697528] env[62522]: INFO nova.compute.manager [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Terminating instance [ 1038.756906] env[62522]: DEBUG oslo_concurrency.lockutils [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquiring lock "97f4c6ab-04de-4069-8ce0-1509c30ffb0f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.757330] env[62522]: DEBUG oslo_concurrency.lockutils [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "97f4c6ab-04de-4069-8ce0-1509c30ffb0f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.757643] env[62522]: DEBUG oslo_concurrency.lockutils [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquiring lock "97f4c6ab-04de-4069-8ce0-1509c30ffb0f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} 
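
The "Acquiring lock" / "acquired ... waited" / "released ... held" triplets above come from oslo.concurrency's `synchronized` decorator, which serializes callers on a named lock and reports the wait and hold times seen in the log. A minimal sketch of the pattern (illustrative only, not the actual ResourceTracker code):

```python
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage(context, instance):
    # Only one thread in this worker mutates the tracked resource totals at a
    # time; contenders block, and the waited/held durations end up in the log.
    ...
```
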
[ 1038.757912] env[62522]: DEBUG oslo_concurrency.lockutils [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "97f4c6ab-04de-4069-8ce0-1509c30ffb0f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.758290] env[62522]: DEBUG oslo_concurrency.lockutils [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "97f4c6ab-04de-4069-8ce0-1509c30ffb0f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1038.761248] env[62522]: INFO nova.compute.manager [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Terminating instance [ 1038.765501] env[62522]: DEBUG nova.compute.manager [req-c10bc119-9fb9-4096-8fba-ed82569dff7e req-1046d496-425c-4b08-9455-ec55415a6e97 service nova] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Received event network-vif-deleted-32b31ef7-1045-4a88-9dba-5d5e5040efc2 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1038.782658] env[62522]: DEBUG nova.compute.manager [req-14dbf14f-047e-4dfa-9c5c-1447ac783420 req-8bdf1f30-3138-4897-bde3-4f72f3941bb9 service nova] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Received event network-vif-plugged-f38ae927-c0d7-4f7c-91ab-2354af588af0 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1038.782994] env[62522]: DEBUG oslo_concurrency.lockutils [req-14dbf14f-047e-4dfa-9c5c-1447ac783420 req-8bdf1f30-3138-4897-bde3-4f72f3941bb9 service nova] Acquiring lock "1a5a235a-477f-4da5-b5c1-ee057211cce8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.783930] env[62522]: DEBUG oslo_concurrency.lockutils [req-14dbf14f-047e-4dfa-9c5c-1447ac783420 req-8bdf1f30-3138-4897-bde3-4f72f3941bb9 service nova] Lock "1a5a235a-477f-4da5-b5c1-ee057211cce8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.784180] env[62522]: DEBUG oslo_concurrency.lockutils [req-14dbf14f-047e-4dfa-9c5c-1447ac783420 req-8bdf1f30-3138-4897-bde3-4f72f3941bb9 service nova] Lock "1a5a235a-477f-4da5-b5c1-ee057211cce8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1038.784457] env[62522]: DEBUG nova.compute.manager [req-14dbf14f-047e-4dfa-9c5c-1447ac783420 req-8bdf1f30-3138-4897-bde3-4f72f3941bb9 service nova] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] No waiting events found dispatching network-vif-plugged-f38ae927-c0d7-4f7c-91ab-2354af588af0 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 
1038.784724] env[62522]: WARNING nova.compute.manager [req-14dbf14f-047e-4dfa-9c5c-1447ac783420 req-8bdf1f30-3138-4897-bde3-4f72f3941bb9 service nova] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Received unexpected event network-vif-plugged-f38ae927-c0d7-4f7c-91ab-2354af588af0 for instance with vm_state building and task_state spawning. [ 1038.785446] env[62522]: DEBUG nova.compute.manager [req-14dbf14f-047e-4dfa-9c5c-1447ac783420 req-8bdf1f30-3138-4897-bde3-4f72f3941bb9 service nova] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Received event network-changed-f38ae927-c0d7-4f7c-91ab-2354af588af0 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1038.785446] env[62522]: DEBUG nova.compute.manager [req-14dbf14f-047e-4dfa-9c5c-1447ac783420 req-8bdf1f30-3138-4897-bde3-4f72f3941bb9 service nova] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Refreshing instance network info cache due to event network-changed-f38ae927-c0d7-4f7c-91ab-2354af588af0. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1038.785628] env[62522]: DEBUG oslo_concurrency.lockutils [req-14dbf14f-047e-4dfa-9c5c-1447ac783420 req-8bdf1f30-3138-4897-bde3-4f72f3941bb9 service nova] Acquiring lock "refresh_cache-1a5a235a-477f-4da5-b5c1-ee057211cce8" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1038.871174] env[62522]: DEBUG nova.network.neutron [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1038.906126] env[62522]: DEBUG oslo_concurrency.lockutils [None req-931c628f-5ae3-485a-9b74-f7cfcc4f12eb tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Lock "5c9b1120-84ad-48d5-8cd4-0cf387963066" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.142s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1038.922516] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.020175] env[62522]: DEBUG nova.network.neutron [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Updating instance_info_cache with network_info: [{"id": "f38ae927-c0d7-4f7c-91ab-2354af588af0", "address": "fa:16:3e:79:90:96", "network": {"id": "d6a06fb0-929f-44b6-93c4-698be8498194", "bridge": "br-int", "label": "tempest-ImagesTestJSON-272550236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"61314d3f0b9e4c368312e714a953e549", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf38ae927-c0", "ovs_interfaceid": "f38ae927-c0d7-4f7c-91ab-2354af588af0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.056445] env[62522]: DEBUG oslo_concurrency.lockutils [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.059220] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 5.851s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.074562] env[62522]: DEBUG nova.network.neutron [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Updating instance_info_cache with network_info: [{"id": "2e33c70f-036d-459c-a393-f570cbf7089c", "address": "fa:16:3e:58:55:4e", "network": {"id": "5f1d73d1-ff9e-4081-87cf-8df6294f67c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-892212702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "962664c996f24cf9ae192f79fae18ca4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "419a5b3f-4c6f-4168-9def-746b4d8c5c24", "external-id": "nsx-vlan-transportzone-656", "segmentation_id": 656, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e33c70f-03", "ovs_interfaceid": "2e33c70f-036d-459c-a393-f570cbf7089c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.088499] env[62522]: INFO nova.scheduler.client.report [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Deleted allocations for instance e1225c6f-9025-41ff-94fa-a55af49aeed2 [ 1039.159451] env[62522]: DEBUG nova.objects.instance [None req-3e151e3c-892a-4850-967d-6e1c1d04bbd9 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lazy-loading 'pci_requests' on Instance uuid 
fcd0eef6-d059-4495-a982-058b6c9626d1 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1039.202036] env[62522]: DEBUG nova.compute.manager [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1039.202210] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1039.203499] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a67e736-570a-42de-8962-c6f0254bb50e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.212896] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1039.213177] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-daa3b4c6-422f-4bf4-b846-ad1abed7e525 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.220796] env[62522]: DEBUG oslo_vmware.api [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1039.220796] env[62522]: value = "task-2415996" [ 1039.220796] env[62522]: _type = "Task" [ 1039.220796] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.230336] env[62522]: DEBUG oslo_vmware.api [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415996, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.268370] env[62522]: DEBUG nova.compute.manager [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1039.268558] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1039.269399] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18d28c4c-65d0-45b1-a062-5b8914e9a068 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.277911] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1039.278229] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eafdf15d-c6ba-41d5-98b9-b792b796cb6f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.285736] env[62522]: DEBUG oslo_vmware.api [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1039.285736] env[62522]: value = "task-2415997" [ 1039.285736] env[62522]: _type = "Task" [ 1039.285736] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.297440] env[62522]: DEBUG oslo_vmware.api [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415997, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.522989] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Releasing lock "refresh_cache-1a5a235a-477f-4da5-b5c1-ee057211cce8" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1039.523463] env[62522]: DEBUG nova.compute.manager [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Instance network_info: |[{"id": "f38ae927-c0d7-4f7c-91ab-2354af588af0", "address": "fa:16:3e:79:90:96", "network": {"id": "d6a06fb0-929f-44b6-93c4-698be8498194", "bridge": "br-int", "label": "tempest-ImagesTestJSON-272550236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61314d3f0b9e4c368312e714a953e549", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf38ae927-c0", "ovs_interfaceid": "f38ae927-c0d7-4f7c-91ab-2354af588af0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1039.523907] env[62522]: DEBUG oslo_concurrency.lockutils [req-14dbf14f-047e-4dfa-9c5c-1447ac783420 req-8bdf1f30-3138-4897-bde3-4f72f3941bb9 service nova] Acquired lock "refresh_cache-1a5a235a-477f-4da5-b5c1-ee057211cce8" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.524184] env[62522]: DEBUG nova.network.neutron [req-14dbf14f-047e-4dfa-9c5c-1447ac783420 req-8bdf1f30-3138-4897-bde3-4f72f3941bb9 service nova] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Refreshing network info cache for port f38ae927-c0d7-4f7c-91ab-2354af588af0 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1039.525790] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:79:90:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '399f3826-705c-45f7-9fe0-3a08a945151a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f38ae927-c0d7-4f7c-91ab-2354af588af0', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1039.534285] env[62522]: DEBUG oslo.service.loopingcall [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to 
return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1039.534829] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1039.535711] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb090eba-4908-48e7-9045-e0a0eb7c7f97 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.558385] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1039.558385] env[62522]: value = "task-2415998" [ 1039.558385] env[62522]: _type = "Task" [ 1039.558385] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.566114] env[62522]: INFO nova.compute.claims [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1039.576364] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415998, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.576858] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Releasing lock "refresh_cache-c181ce48-9fe2-4400-9047-f8b5a7159dd3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1039.577077] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Updated the network info_cache for instance {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 1039.577281] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1039.577438] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1039.577590] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1039.578189] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1039.578189] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1039.578189] env[62522]: 
DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1039.578443] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62522) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1039.578443] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1039.595551] env[62522]: DEBUG oslo_concurrency.lockutils [None req-530682e2-4898-4d00-bd61-5e054c638996 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "e1225c6f-9025-41ff-94fa-a55af49aeed2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.306s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.638056] env[62522]: INFO nova.compute.manager [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Rescuing [ 1039.638056] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquiring lock "refresh_cache-5c9b1120-84ad-48d5-8cd4-0cf387963066" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1039.638251] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquired lock "refresh_cache-5c9b1120-84ad-48d5-8cd4-0cf387963066" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.638357] env[62522]: DEBUG nova.network.neutron [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1039.662954] env[62522]: DEBUG nova.objects.base [None req-3e151e3c-892a-4850-967d-6e1c1d04bbd9 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62522) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1039.663317] env[62522]: DEBUG nova.network.neutron [None req-3e151e3c-892a-4850-967d-6e1c1d04bbd9 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1039.733306] env[62522]: DEBUG oslo_vmware.api [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415996, 
'name': PowerOffVM_Task, 'duration_secs': 0.317089} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.733306] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1039.733548] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1039.733589] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5305e359-1a9d-425e-a848-52f70d1256a7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.797729] env[62522]: DEBUG oslo_vmware.api [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2415997, 'name': PowerOffVM_Task, 'duration_secs': 0.264525} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.798150] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1039.798259] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1039.798567] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f6a376a3-5109-4871-8641-da54b295d514 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.813793] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3e151e3c-892a-4850-967d-6e1c1d04bbd9 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "interface-fcd0eef6-d059-4495-a982-058b6c9626d1-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.161s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.821495] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1039.821791] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 
tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1039.822045] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Deleting the datastore file [datastore1] 4e9436df-c86b-429b-abc2-97f760858055 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1039.822364] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9b69e0b0-b221-4aaf-ba92-8ef67de4c9d8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.831797] env[62522]: DEBUG oslo_vmware.api [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1039.831797] env[62522]: value = "task-2416001" [ 1039.831797] env[62522]: _type = "Task" [ 1039.831797] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.841206] env[62522]: DEBUG oslo_vmware.api [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2416001, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.884442] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1039.884682] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1039.884869] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Deleting the datastore file [datastore2] 97f4c6ab-04de-4069-8ce0-1509c30ffb0f {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1039.885177] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ff7def9e-2112-494c-b5bd-6b2ff20ec0c1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.893120] env[62522]: DEBUG oslo_vmware.api [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for the task: (returnval){ [ 1039.893120] env[62522]: value = "task-2416002" [ 1039.893120] env[62522]: _type = "Task" 
[ 1039.893120] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.904390] env[62522]: DEBUG oslo_vmware.api [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2416002, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.069644] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2415998, 'name': CreateVM_Task, 'duration_secs': 0.38883} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.069840] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1040.070693] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1040.071165] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.071165] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1040.074917] env[62522]: INFO nova.compute.resource_tracker [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Updating resource usage from migration 31f1ffb8-0f4c-4352-bb6b-cf4a60fb08b0 [ 1040.077192] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6143c16-b559-498c-aa76-dd65be583453 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.081483] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.083367] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1040.083367] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fdfbce-b669-4a76-f5ee-99ec4c5cea3b" [ 1040.083367] env[62522]: _type = "Task" [ 1040.083367] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.092964] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fdfbce-b669-4a76-f5ee-99ec4c5cea3b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.339538] env[62522]: DEBUG nova.network.neutron [req-14dbf14f-047e-4dfa-9c5c-1447ac783420 req-8bdf1f30-3138-4897-bde3-4f72f3941bb9 service nova] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Updated VIF entry in instance network info cache for port f38ae927-c0d7-4f7c-91ab-2354af588af0. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1040.340633] env[62522]: DEBUG nova.network.neutron [req-14dbf14f-047e-4dfa-9c5c-1447ac783420 req-8bdf1f30-3138-4897-bde3-4f72f3941bb9 service nova] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Updating instance_info_cache with network_info: [{"id": "f38ae927-c0d7-4f7c-91ab-2354af588af0", "address": "fa:16:3e:79:90:96", "network": {"id": "d6a06fb0-929f-44b6-93c4-698be8498194", "bridge": "br-int", "label": "tempest-ImagesTestJSON-272550236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61314d3f0b9e4c368312e714a953e549", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf38ae927-c0", "ovs_interfaceid": "f38ae927-c0d7-4f7c-91ab-2354af588af0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.353023] env[62522]: DEBUG oslo_vmware.api [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2416001, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171465} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.353023] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1040.355783] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1040.355783] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1040.355783] env[62522]: INFO nova.compute.manager [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1040.355783] env[62522]: DEBUG oslo.service.loopingcall [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1040.355783] env[62522]: DEBUG nova.network.neutron [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Updating instance_info_cache with network_info: [{"id": "78283962-9062-464a-b1f4-a2319257559b", "address": "fa:16:3e:6a:27:1b", "network": {"id": "4b04f6e1-0714-469b-9941-be6f5b6128d0", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-292476225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e1ca5dfb8f2d4b2e932679e017fe8b3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78283962-90", "ovs_interfaceid": "78283962-9062-464a-b1f4-a2319257559b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.358238] env[62522]: DEBUG nova.compute.manager [-] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1040.358238] env[62522]: DEBUG nova.network.neutron [-] [instance: 4e9436df-c86b-429b-abc2-97f760858055] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1040.404668] env[62522]: DEBUG oslo_vmware.api [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Task: {'id': task-2416002, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173838} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.406135] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1040.406435] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1040.406680] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1040.406899] env[62522]: INFO nova.compute.manager [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1040.407176] env[62522]: DEBUG oslo.service.loopingcall [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1040.408078] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-240c6d80-a1e4-4ff0-ab2a-f40e3dcc88f9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.411647] env[62522]: DEBUG nova.compute.manager [-] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1040.411776] env[62522]: DEBUG nova.network.neutron [-] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1040.420085] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8e028d-e6d2-43ac-9681-89b154074f03 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.469527] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59796615-b9d8-4e6a-9cd1-36cf0d2bac61 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.478941] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab52a2a7-3afa-4a98-8a04-f179ee299b66 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.497179] env[62522]: DEBUG nova.compute.provider_tree [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1040.598039] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fdfbce-b669-4a76-f5ee-99ec4c5cea3b, 'name': SearchDatastore_Task, 'duration_secs': 0.013666} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.598039] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1040.598039] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1040.598039] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1040.598742] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.598742] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1040.598742] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5df7bdb0-17fd-4248-99da-6c371141ecd7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.610575] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1040.610945] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1040.612181] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93fb9419-81b0-4fd4-ac07-fe896f13d3bd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.620880] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1040.620880] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529a9c2a-fcf2-93da-a3c8-21fb70b4788b" [ 1040.620880] env[62522]: _type = "Task" [ 1040.620880] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.631344] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529a9c2a-fcf2-93da-a3c8-21fb70b4788b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.824211] env[62522]: DEBUG nova.compute.manager [req-b2dfa4d8-cd0a-41df-99c0-bfcb0a5bba3f req-bd8f5d94-4eb8-466b-a985-32d6de07befe service nova] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Received event network-vif-deleted-18f42e25-7b00-475d-8f2f-b150679eeeef {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1040.824211] env[62522]: INFO nova.compute.manager [req-b2dfa4d8-cd0a-41df-99c0-bfcb0a5bba3f req-bd8f5d94-4eb8-466b-a985-32d6de07befe service nova] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Neutron deleted interface 18f42e25-7b00-475d-8f2f-b150679eeeef; detaching it from the instance and deleting it from the info cache [ 1040.824211] env[62522]: DEBUG nova.network.neutron [req-b2dfa4d8-cd0a-41df-99c0-bfcb0a5bba3f req-bd8f5d94-4eb8-466b-a985-32d6de07befe service nova] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.844690] env[62522]: DEBUG nova.compute.manager [req-d4430e2a-10d2-47d0-977d-0079980d2310 req-d81a08cd-1fda-481f-93dc-96c743b52130 service nova] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Received event network-vif-deleted-b498b6ac-e4c6-46e5-89f4-804206a74bcf {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1040.844898] env[62522]: INFO nova.compute.manager [req-d4430e2a-10d2-47d0-977d-0079980d2310 req-d81a08cd-1fda-481f-93dc-96c743b52130 service nova] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Neutron deleted interface b498b6ac-e4c6-46e5-89f4-804206a74bcf; detaching it from the instance and deleting it from the info cache [ 1040.845088] env[62522]: DEBUG nova.network.neutron [req-d4430e2a-10d2-47d0-977d-0079980d2310 req-d81a08cd-1fda-481f-93dc-96c743b52130 service nova] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.846563] env[62522]: DEBUG oslo_concurrency.lockutils [req-14dbf14f-047e-4dfa-9c5c-1447ac783420 req-8bdf1f30-3138-4897-bde3-4f72f3941bb9 service nova] Releasing lock 
"refresh_cache-1a5a235a-477f-4da5-b5c1-ee057211cce8" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1040.861521] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Releasing lock "refresh_cache-5c9b1120-84ad-48d5-8cd4-0cf387963066" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1041.001505] env[62522]: DEBUG nova.scheduler.client.report [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1041.132572] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529a9c2a-fcf2-93da-a3c8-21fb70b4788b, 'name': SearchDatastore_Task, 'duration_secs': 0.012362} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.133238] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c034dd5-2a42-46fc-941c-f9ee1e8858d7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.139962] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1041.139962] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5245b371-adbb-9b69-eaf8-66aa2b673fb9" [ 1041.139962] env[62522]: _type = "Task" [ 1041.139962] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.149212] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5245b371-adbb-9b69-eaf8-66aa2b673fb9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.203376] env[62522]: DEBUG nova.network.neutron [-] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.224317] env[62522]: DEBUG nova.network.neutron [-] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.328329] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8496cb72-8d74-4e39-95bb-85e37dda232f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.340232] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86f4b02a-db88-472e-b43e-bdb1f9721e09 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.354090] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-84519386-8547-49b8-bed5-48fae0d76141 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.375443] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc68823-3ce8-4cce-9800-38b463c15a54 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.400544] env[62522]: DEBUG nova.compute.manager [req-b2dfa4d8-cd0a-41df-99c0-bfcb0a5bba3f req-bd8f5d94-4eb8-466b-a985-32d6de07befe service nova] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Detach interface failed, port_id=18f42e25-7b00-475d-8f2f-b150679eeeef, reason: Instance 4e9436df-c86b-429b-abc2-97f760858055 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1041.429068] env[62522]: DEBUG nova.compute.manager [req-d4430e2a-10d2-47d0-977d-0079980d2310 req-d81a08cd-1fda-481f-93dc-96c743b52130 service nova] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Detach interface failed, port_id=b498b6ac-e4c6-46e5-89f4-804206a74bcf, reason: Instance 97f4c6ab-04de-4069-8ce0-1509c30ffb0f could not be found. 
{{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1041.509678] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.450s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.509894] env[62522]: INFO nova.compute.manager [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Migrating [ 1041.516666] env[62522]: DEBUG oslo_concurrency.lockutils [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.339s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.516893] env[62522]: DEBUG nova.objects.instance [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lazy-loading 'pci_requests' on Instance uuid 04a9d357-d094-487b-8f09-2f7e0c35f0d7 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1041.651652] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5245b371-adbb-9b69-eaf8-66aa2b673fb9, 'name': SearchDatastore_Task, 'duration_secs': 0.018744} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.652165] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1041.652244] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 1a5a235a-477f-4da5-b5c1-ee057211cce8/1a5a235a-477f-4da5-b5c1-ee057211cce8.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1041.652489] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-38912a7f-dcc4-4572-b38d-5789de54e528 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.661486] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1041.661486] env[62522]: value = "task-2416003" [ 1041.661486] env[62522]: _type = "Task" [ 1041.661486] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.671159] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416003, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.693076] env[62522]: DEBUG oslo_concurrency.lockutils [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "interface-fcd0eef6-d059-4495-a982-058b6c9626d1-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.693373] env[62522]: DEBUG oslo_concurrency.lockutils [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "interface-fcd0eef6-d059-4495-a982-058b6c9626d1-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.693783] env[62522]: DEBUG nova.objects.instance [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lazy-loading 'flavor' on Instance uuid fcd0eef6-d059-4495-a982-058b6c9626d1 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1041.706443] env[62522]: INFO nova.compute.manager [-] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Took 1.35 seconds to deallocate network for instance. [ 1041.727344] env[62522]: INFO nova.compute.manager [-] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Took 1.32 seconds to deallocate network for instance. [ 1042.028607] env[62522]: DEBUG nova.objects.instance [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lazy-loading 'numa_topology' on Instance uuid 04a9d357-d094-487b-8f09-2f7e0c35f0d7 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1042.030330] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "refresh_cache-cabe40a0-8bd0-4d77-b949-298bd194fa42" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1042.030512] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired lock "refresh_cache-cabe40a0-8bd0-4d77-b949-298bd194fa42" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.030719] env[62522]: DEBUG nova.network.neutron [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1042.175942] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416003, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.213452] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.233999] env[62522]: DEBUG oslo_concurrency.lockutils [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.337026] env[62522]: DEBUG nova.objects.instance [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lazy-loading 'pci_requests' on Instance uuid fcd0eef6-d059-4495-a982-058b6c9626d1 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1042.407138] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1042.407485] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d2850f86-7eb1-4f7c-9ad3-e217b5ccbd1f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.416934] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1042.416934] env[62522]: value = "task-2416004" [ 1042.416934] env[62522]: _type = "Task" [ 1042.416934] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.426662] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416004, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.534259] env[62522]: INFO nova.compute.claims [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1042.682587] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416003, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.548805} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.685290] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 1a5a235a-477f-4da5-b5c1-ee057211cce8/1a5a235a-477f-4da5-b5c1-ee057211cce8.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1042.685578] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1042.685872] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9ab0f0f2-6d59-49cd-b191-4e920c749ae1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.694745] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1042.694745] env[62522]: value = "task-2416005" [ 1042.694745] env[62522]: _type = "Task" [ 1042.694745] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.706376] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416005, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.826533] env[62522]: DEBUG nova.network.neutron [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Updating instance_info_cache with network_info: [{"id": "9e10cc19-76da-49d9-80b6-068ce128a1b0", "address": "fa:16:3e:3f:35:de", "network": {"id": "70e81afa-0eda-49c5-b072-e79b3c287468", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1715169983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff5da278d2be4ca983424c8291beadec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e10cc19-76", "ovs_interfaceid": "9e10cc19-76da-49d9-80b6-068ce128a1b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.839501] env[62522]: DEBUG nova.objects.base [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62522) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1042.839700] env[62522]: DEBUG nova.network.neutron [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1042.880709] env[62522]: DEBUG nova.policy [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab5e5a8e6ee64aad8d52342ee3f5af36', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4bdd1f5caf09454d808bcdc15df2d3a7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1042.928756] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416004, 'name': PowerOffVM_Task, 'duration_secs': 0.342222} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.929048] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1042.930950] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-601e8257-c30b-4bde-914c-784e4b5e4005 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.952106] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084bc630-d844-4401-be42-d7d1e876acfd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.986772] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1042.986772] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3123fc27-fdce-4f22-a25b-1164f056ea14 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.995152] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1042.995152] env[62522]: value = "task-2416006" [ 1042.995152] env[62522]: _type = "Task" [ 1042.995152] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.004141] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416006, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.207454] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416005, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.114378} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.207454] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1043.207454] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd0d0117-207f-4175-afbd-ce518731e73d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.230924] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 1a5a235a-477f-4da5-b5c1-ee057211cce8/1a5a235a-477f-4da5-b5c1-ee057211cce8.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1043.232037] env[62522]: DEBUG nova.network.neutron [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Successfully created port: 0ac91806-75b5-459d-8243-019320a7daf0 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1043.234299] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-443a7f3b-c4e6-4fd8-84e7-476060912d32 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.256418] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1043.256418] env[62522]: value = "task-2416007" [ 1043.256418] env[62522]: _type = "Task" [ 1043.256418] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.266780] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416007, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.331055] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Releasing lock "refresh_cache-cabe40a0-8bd0-4d77-b949-298bd194fa42" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1043.509809] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] VM already powered off {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1043.510058] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1043.510314] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1043.510462] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.510892] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1043.511206] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1fc1885-894a-461c-8287-578e90eecd63 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.520939] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1043.521161] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1043.522018] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e870308-b4f8-4f30-9f5f-615a23180582 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.528247] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1043.528247] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5275378b-9a73-2470-d779-4857f474c0a1" [ 1043.528247] env[62522]: _type = "Task" [ 1043.528247] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.537861] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5275378b-9a73-2470-d779-4857f474c0a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.603360] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1175fbd7-c8e0-40b1-b224-b87a8490587c tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "3c4c395c-0625-4569-990d-e2d4ad162c14" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.603630] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1175fbd7-c8e0-40b1-b224-b87a8490587c tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "3c4c395c-0625-4569-990d-e2d4ad162c14" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.767825] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416007, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.837035] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e625c6-6e19-4f9b-9630-6965d309fd86 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.845702] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8dfa95b-ff34-450e-95a1-949a6eb0fd4d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.878388] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d5e4107-7f04-4182-9c3a-46ed880c69b9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.887828] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d9da15-6f13-44e4-875a-72353ef8ec44 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.904095] env[62522]: DEBUG nova.compute.provider_tree [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1044.041316] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5275378b-9a73-2470-d779-4857f474c0a1, 'name': SearchDatastore_Task, 'duration_secs': 0.035019} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.042159] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-585fa799-a983-41f3-9caf-21ad46829f7e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.048355] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1044.048355] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5263e76b-26fc-d91a-a903-abfdc91fe7ba" [ 1044.048355] env[62522]: _type = "Task" [ 1044.048355] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.057554] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5263e76b-26fc-d91a-a903-abfdc91fe7ba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.108655] env[62522]: INFO nova.compute.manager [None req-1175fbd7-c8e0-40b1-b224-b87a8490587c tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Detaching volume 1f2482a0-4337-44cc-a788-681ae00783ea [ 1044.143544] env[62522]: INFO nova.virt.block_device [None req-1175fbd7-c8e0-40b1-b224-b87a8490587c tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Attempting to driver detach volume 1f2482a0-4337-44cc-a788-681ae00783ea from mountpoint /dev/sdb [ 1044.143809] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-1175fbd7-c8e0-40b1-b224-b87a8490587c tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Volume detach. Driver type: vmdk {{(pid=62522) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1044.144037] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-1175fbd7-c8e0-40b1-b224-b87a8490587c tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489776', 'volume_id': '1f2482a0-4337-44cc-a788-681ae00783ea', 'name': 'volume-1f2482a0-4337-44cc-a788-681ae00783ea', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3c4c395c-0625-4569-990d-e2d4ad162c14', 'attached_at': '', 'detached_at': '', 'volume_id': '1f2482a0-4337-44cc-a788-681ae00783ea', 'serial': '1f2482a0-4337-44cc-a788-681ae00783ea'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1044.144886] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b00215c9-cac8-4dc6-ab22-f1ed1f81c94d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.167962] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d69cbc-22cc-4860-9e95-83e2c8c11b04 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.176367] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf6351b-0212-4b00-91ac-a3c64c1c0f7f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.196459] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73df29be-7a27-40fd-8dd1-520bd1a98822 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.210550] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-1175fbd7-c8e0-40b1-b224-b87a8490587c tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] The volume has not been displaced from its original location: [datastore1] volume-1f2482a0-4337-44cc-a788-681ae00783ea/volume-1f2482a0-4337-44cc-a788-681ae00783ea.vmdk. No consolidation needed. 
{{(pid=62522) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1044.215733] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-1175fbd7-c8e0-40b1-b224-b87a8490587c tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Reconfiguring VM instance instance-0000002d to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1044.215994] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9f57b57-f1a0-4213-b3c6-2521f0598fd6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.233296] env[62522]: DEBUG oslo_vmware.api [None req-1175fbd7-c8e0-40b1-b224-b87a8490587c tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1044.233296] env[62522]: value = "task-2416008" [ 1044.233296] env[62522]: _type = "Task" [ 1044.233296] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.240614] env[62522]: DEBUG oslo_vmware.api [None req-1175fbd7-c8e0-40b1-b224-b87a8490587c tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416008, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.269606] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416007, 'name': ReconfigVM_Task, 'duration_secs': 0.519844} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.269917] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 1a5a235a-477f-4da5-b5c1-ee057211cce8/1a5a235a-477f-4da5-b5c1-ee057211cce8.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1044.270577] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c206b60a-61a2-452a-8eed-7583d57131ab {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.277971] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1044.277971] env[62522]: value = "task-2416009" [ 1044.277971] env[62522]: _type = "Task" [ 1044.277971] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.286341] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416009, 'name': Rename_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.407122] env[62522]: DEBUG nova.scheduler.client.report [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1044.561296] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5263e76b-26fc-d91a-a903-abfdc91fe7ba, 'name': SearchDatastore_Task, 'duration_secs': 0.010099} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.561578] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1044.561838] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 5c9b1120-84ad-48d5-8cd4-0cf387963066/2ee4561b-ba48-4f45-82f6-eac89be98290-rescue.vmdk. {{(pid=62522) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1044.562114] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b0a82b2f-a2d5-46d2-8c0c-a193cd019bad {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.569240] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1044.569240] env[62522]: value = "task-2416010" [ 1044.569240] env[62522]: _type = "Task" [ 1044.569240] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.577692] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416010, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.743710] env[62522]: DEBUG oslo_vmware.api [None req-1175fbd7-c8e0-40b1-b224-b87a8490587c tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416008, 'name': ReconfigVM_Task, 'duration_secs': 0.23563} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.744034] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-1175fbd7-c8e0-40b1-b224-b87a8490587c tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Reconfigured VM instance instance-0000002d to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1044.750115] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-651768f0-908d-4ee0-b71d-89d7e2994243 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.766975] env[62522]: DEBUG oslo_vmware.api [None req-1175fbd7-c8e0-40b1-b224-b87a8490587c tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1044.766975] env[62522]: value = "task-2416011" [ 1044.766975] env[62522]: _type = "Task" [ 1044.766975] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.775678] env[62522]: DEBUG oslo_vmware.api [None req-1175fbd7-c8e0-40b1-b224-b87a8490587c tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416011, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.789034] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416009, 'name': Rename_Task, 'duration_secs': 0.204064} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.789034] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1044.789034] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7cad3943-d4ab-4c95-931f-697cc15c8ca3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.796334] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1044.796334] env[62522]: value = "task-2416012" [ 1044.796334] env[62522]: _type = "Task" [ 1044.796334] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.804960] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416012, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.807287] env[62522]: DEBUG nova.compute.manager [req-066ba39f-33de-4436-8c96-fccf51579e53 req-e5e446c8-d78f-4396-be8e-39a964c56714 service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Received event network-vif-plugged-0ac91806-75b5-459d-8243-019320a7daf0 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1044.807569] env[62522]: DEBUG oslo_concurrency.lockutils [req-066ba39f-33de-4436-8c96-fccf51579e53 req-e5e446c8-d78f-4396-be8e-39a964c56714 service nova] Acquiring lock "fcd0eef6-d059-4495-a982-058b6c9626d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.807795] env[62522]: DEBUG oslo_concurrency.lockutils [req-066ba39f-33de-4436-8c96-fccf51579e53 req-e5e446c8-d78f-4396-be8e-39a964c56714 service nova] Lock "fcd0eef6-d059-4495-a982-058b6c9626d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.807980] env[62522]: DEBUG oslo_concurrency.lockutils [req-066ba39f-33de-4436-8c96-fccf51579e53 req-e5e446c8-d78f-4396-be8e-39a964c56714 service nova] Lock "fcd0eef6-d059-4495-a982-058b6c9626d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.808196] env[62522]: DEBUG nova.compute.manager [req-066ba39f-33de-4436-8c96-fccf51579e53 req-e5e446c8-d78f-4396-be8e-39a964c56714 service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] No waiting events found dispatching network-vif-plugged-0ac91806-75b5-459d-8243-019320a7daf0 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1044.808334] env[62522]: WARNING nova.compute.manager [req-066ba39f-33de-4436-8c96-fccf51579e53 req-e5e446c8-d78f-4396-be8e-39a964c56714 service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Received unexpected event network-vif-plugged-0ac91806-75b5-459d-8243-019320a7daf0 for instance with vm_state active and task_state None. 
[ 1044.850064] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fef078c-171d-4288-8374-be04b5e7e7a2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.882727] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Updating instance 'cabe40a0-8bd0-4d77-b949-298bd194fa42' progress to 0 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1044.913536] env[62522]: DEBUG oslo_concurrency.lockutils [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.396s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.915945] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.765s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.916228] env[62522]: DEBUG nova.objects.instance [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lazy-loading 'resources' on Instance uuid 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1044.959461] env[62522]: INFO nova.network.neutron [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Updating port 7e36641e-fc4a-4223-ab07-33dc49821168 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1044.969818] env[62522]: DEBUG nova.network.neutron [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Successfully updated port: 0ac91806-75b5-459d-8243-019320a7daf0 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1045.080505] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416010, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.277280] env[62522]: DEBUG oslo_vmware.api [None req-1175fbd7-c8e0-40b1-b224-b87a8490587c tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416011, 'name': ReconfigVM_Task, 'duration_secs': 0.180559} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.277588] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-1175fbd7-c8e0-40b1-b224-b87a8490587c tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489776', 'volume_id': '1f2482a0-4337-44cc-a788-681ae00783ea', 'name': 'volume-1f2482a0-4337-44cc-a788-681ae00783ea', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3c4c395c-0625-4569-990d-e2d4ad162c14', 'attached_at': '', 'detached_at': '', 'volume_id': '1f2482a0-4337-44cc-a788-681ae00783ea', 'serial': '1f2482a0-4337-44cc-a788-681ae00783ea'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1045.306116] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416012, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.393146] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1045.393468] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-56c6e683-833f-45fb-af0b-73ee4dc414a3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.401816] env[62522]: DEBUG oslo_vmware.api [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1045.401816] env[62522]: value = "task-2416013" [ 1045.401816] env[62522]: _type = "Task" [ 1045.401816] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.414040] env[62522]: DEBUG oslo_vmware.api [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416013, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.472962] env[62522]: DEBUG oslo_concurrency.lockutils [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "refresh_cache-fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1045.473193] env[62522]: DEBUG oslo_concurrency.lockutils [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "refresh_cache-fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.473370] env[62522]: DEBUG nova.network.neutron [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1045.584305] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416010, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.558976} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.584457] env[62522]: INFO nova.virt.vmwareapi.ds_util [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 5c9b1120-84ad-48d5-8cd4-0cf387963066/2ee4561b-ba48-4f45-82f6-eac89be98290-rescue.vmdk. [ 1045.585218] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-856b41ec-7f68-4c91-b93b-6a5ed5f62b6f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.612722] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 5c9b1120-84ad-48d5-8cd4-0cf387963066/2ee4561b-ba48-4f45-82f6-eac89be98290-rescue.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1045.613103] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c863a59-4738-41de-ab40-d39321163348 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.637236] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1045.637236] env[62522]: value = "task-2416014" [ 1045.637236] env[62522]: _type = "Task" [ 1045.637236] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.648418] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416014, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.733967] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f606f3fe-f4c2-4802-9986-077383f449e5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.741696] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01853453-e2c9-4d88-bd84-9747685f78b4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.771461] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd9313c1-da4e-4a40-8395-851c493599df {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.779050] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131ce5f7-468d-4c97-a430-e5d16de1fb8a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.794274] env[62522]: DEBUG nova.compute.provider_tree [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1045.808746] env[62522]: DEBUG oslo_vmware.api [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416012, 'name': PowerOnVM_Task, 'duration_secs': 0.651712} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.809126] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1045.809422] env[62522]: INFO nova.compute.manager [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Took 8.73 seconds to spawn the instance on the hypervisor. 
[ 1045.810044] env[62522]: DEBUG nova.compute.manager [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1045.810824] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab52f9d-f9b1-464f-ae9a-815dd1b04f35 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.821060] env[62522]: DEBUG nova.objects.instance [None req-1175fbd7-c8e0-40b1-b224-b87a8490587c tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lazy-loading 'flavor' on Instance uuid 3c4c395c-0625-4569-990d-e2d4ad162c14 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1045.911991] env[62522]: DEBUG oslo_vmware.api [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416013, 'name': PowerOffVM_Task, 'duration_secs': 0.229281} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.912282] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1045.912511] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Updating instance 'cabe40a0-8bd0-4d77-b949-298bd194fa42' progress to 17 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1046.029446] env[62522]: WARNING nova.network.neutron [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] be0fe686-4986-439e-aa82-5cbe54104c8a already exists in list: networks containing: ['be0fe686-4986-439e-aa82-5cbe54104c8a']. ignoring it [ 1046.150356] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416014, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.297650] env[62522]: DEBUG nova.scheduler.client.report [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1046.335934] env[62522]: INFO nova.compute.manager [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Took 15.20 seconds to build instance. [ 1046.401766] env[62522]: DEBUG nova.network.neutron [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Updating instance_info_cache with network_info: [{"id": "954fee91-36f2-497a-a856-6828a519a456", "address": "fa:16:3e:df:f4:48", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.136", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap954fee91-36", "ovs_interfaceid": "954fee91-36f2-497a-a856-6828a519a456", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0ac91806-75b5-459d-8243-019320a7daf0", "address": "fa:16:3e:d2:6b:0e", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", 
"segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ac91806-75", "ovs_interfaceid": "0ac91806-75b5-459d-8243-019320a7daf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.418503] env[62522]: DEBUG nova.virt.hardware [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:04Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1046.418835] env[62522]: DEBUG nova.virt.hardware [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1046.419011] env[62522]: DEBUG nova.virt.hardware [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1046.419203] env[62522]: DEBUG nova.virt.hardware [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1046.419423] env[62522]: DEBUG nova.virt.hardware [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1046.419674] env[62522]: DEBUG nova.virt.hardware [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1046.419793] env[62522]: DEBUG nova.virt.hardware [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1046.419939] env[62522]: DEBUG nova.virt.hardware [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Build 
topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1046.420120] env[62522]: DEBUG nova.virt.hardware [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1046.420284] env[62522]: DEBUG nova.virt.hardware [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1046.420564] env[62522]: DEBUG nova.virt.hardware [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1046.426631] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b39b5569-2e71-43f2-a038-24f4b35668e6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.446028] env[62522]: DEBUG oslo_vmware.api [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1046.446028] env[62522]: value = "task-2416015" [ 1046.446028] env[62522]: _type = "Task" [ 1046.446028] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.457404] env[62522]: DEBUG oslo_vmware.api [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416015, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.458200] env[62522]: DEBUG oslo_concurrency.lockutils [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquiring lock "refresh_cache-04a9d357-d094-487b-8f09-2f7e0c35f0d7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.458412] env[62522]: DEBUG oslo_concurrency.lockutils [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquired lock "refresh_cache-04a9d357-d094-487b-8f09-2f7e0c35f0d7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.458584] env[62522]: DEBUG nova.network.neutron [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1046.479816] env[62522]: DEBUG nova.compute.manager [req-2219a49f-f0db-47cd-b3b0-a0305ce17a4f req-c0a80ebc-f9c0-4c50-8631-dec1ab604629 service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Received event network-vif-plugged-7e36641e-fc4a-4223-ab07-33dc49821168 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1046.480077] env[62522]: DEBUG oslo_concurrency.lockutils [req-2219a49f-f0db-47cd-b3b0-a0305ce17a4f req-c0a80ebc-f9c0-4c50-8631-dec1ab604629 service nova] Acquiring lock "04a9d357-d094-487b-8f09-2f7e0c35f0d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.480427] env[62522]: DEBUG oslo_concurrency.lockutils [req-2219a49f-f0db-47cd-b3b0-a0305ce17a4f req-c0a80ebc-f9c0-4c50-8631-dec1ab604629 service nova] Lock "04a9d357-d094-487b-8f09-2f7e0c35f0d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.480662] env[62522]: DEBUG oslo_concurrency.lockutils [req-2219a49f-f0db-47cd-b3b0-a0305ce17a4f req-c0a80ebc-f9c0-4c50-8631-dec1ab604629 service nova] Lock "04a9d357-d094-487b-8f09-2f7e0c35f0d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.480912] env[62522]: DEBUG nova.compute.manager [req-2219a49f-f0db-47cd-b3b0-a0305ce17a4f req-c0a80ebc-f9c0-4c50-8631-dec1ab604629 service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] No waiting events found dispatching network-vif-plugged-7e36641e-fc4a-4223-ab07-33dc49821168 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1046.481209] env[62522]: WARNING nova.compute.manager [req-2219a49f-f0db-47cd-b3b0-a0305ce17a4f req-c0a80ebc-f9c0-4c50-8631-dec1ab604629 service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Received unexpected event network-vif-plugged-7e36641e-fc4a-4223-ab07-33dc49821168 for instance with vm_state shelved_offloaded and task_state spawning. 
[ 1046.650728] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416014, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.793730] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8370e1ec-25a0-4c3a-83e6-7f9ad8bfc73b tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "3c4c395c-0625-4569-990d-e2d4ad162c14" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.803863] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.888s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.806410] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.884s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.806656] env[62522]: DEBUG nova.objects.instance [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Lazy-loading 'resources' on Instance uuid a10c4dee-4490-445a-bea2-9f8ef5425d15 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1046.828913] env[62522]: INFO nova.scheduler.client.report [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Deleted allocations for instance 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc [ 1046.838014] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1175fbd7-c8e0-40b1-b224-b87a8490587c tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "3c4c395c-0625-4569-990d-e2d4ad162c14" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.234s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.838988] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ac2e6947-b2ec-42c0-85b8-426493256009 tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "1a5a235a-477f-4da5-b5c1-ee057211cce8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.730s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.839434] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8370e1ec-25a0-4c3a-83e6-7f9ad8bfc73b tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "3c4c395c-0625-4569-990d-e2d4ad162c14" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.046s {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.839640] env[62522]: DEBUG nova.compute.manager [None req-8370e1ec-25a0-4c3a-83e6-7f9ad8bfc73b tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1046.840831] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a24eb0-ec2c-49b9-bc92-6e39cd36b86a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.850539] env[62522]: DEBUG nova.compute.manager [None req-8370e1ec-25a0-4c3a-83e6-7f9ad8bfc73b tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62522) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1046.851163] env[62522]: DEBUG nova.objects.instance [None req-8370e1ec-25a0-4c3a-83e6-7f9ad8bfc73b tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lazy-loading 'flavor' on Instance uuid 3c4c395c-0625-4569-990d-e2d4ad162c14 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1046.904963] env[62522]: DEBUG oslo_concurrency.lockutils [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "refresh_cache-fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1046.906033] env[62522]: DEBUG oslo_concurrency.lockutils [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.906033] env[62522]: DEBUG oslo_concurrency.lockutils [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.906673] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6077b5e8-b7f2-4431-90e4-215f165110ab {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.925224] env[62522]: DEBUG nova.virt.hardware [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1046.925802] env[62522]: DEBUG nova.virt.hardware [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1046.925802] env[62522]: DEBUG nova.virt.hardware [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1046.925802] env[62522]: DEBUG nova.virt.hardware [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1046.925969] env[62522]: DEBUG nova.virt.hardware [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1046.926083] env[62522]: DEBUG nova.virt.hardware [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1046.926296] env[62522]: DEBUG nova.virt.hardware [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1046.926453] env[62522]: DEBUG nova.virt.hardware [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1046.926618] env[62522]: DEBUG nova.virt.hardware [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1046.926778] env[62522]: DEBUG nova.virt.hardware [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1046.926949] env[62522]: DEBUG nova.virt.hardware [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 
tempest-AttachInterfacesTestJSON-1457883008-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1046.933200] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Reconfiguring VM to attach interface {{(pid=62522) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1046.933857] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9373a35e-5bfe-4a18-ad26-758738090c5b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.953343] env[62522]: DEBUG oslo_vmware.api [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1046.953343] env[62522]: value = "task-2416016" [ 1046.953343] env[62522]: _type = "Task" [ 1046.953343] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.957037] env[62522]: DEBUG oslo_vmware.api [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416015, 'name': ReconfigVM_Task, 'duration_secs': 0.190369} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.959746] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Updating instance 'cabe40a0-8bd0-4d77-b949-298bd194fa42' progress to 33 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1046.970359] env[62522]: DEBUG oslo_vmware.api [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416016, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.151152] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416014, 'name': ReconfigVM_Task, 'duration_secs': 1.176759} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.151488] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 5c9b1120-84ad-48d5-8cd4-0cf387963066/2ee4561b-ba48-4f45-82f6-eac89be98290-rescue.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1047.152416] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec406fe3-4d45-4a40-b109-71cedf21b74c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.181117] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21f938f7-985d-4a44-9191-a4ab40a30c90 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.197276] env[62522]: DEBUG nova.network.neutron [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Updating instance_info_cache with network_info: [{"id": "7e36641e-fc4a-4223-ab07-33dc49821168", "address": "fa:16:3e:f1:bf:49", "network": {"id": "b837f0fb-c2e1-46dd-93b2-62d6c4352316", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1813744063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed393a0454b643eea75c203d1dfd592c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e36641e-fc", "ovs_interfaceid": "7e36641e-fc4a-4223-ab07-33dc49821168", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.199788] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1047.199788] env[62522]: value = "task-2416017" [ 1047.199788] env[62522]: _type = "Task" [ 1047.199788] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.209287] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416017, 'name': ReconfigVM_Task} progress is 5%. 
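[note] The recurring "Waiting for the task: (returnval){ value = "task-…" _type = "Task" }" / "progress is N%" pairs are oslo.vmware polling a vCenter Task managed object until it reaches a terminal state. A hedged, self-contained sketch of that polling loop; get_task_info is a stand-in callable, not the real oslo.vmware API, which goes through the vSphere PropertyCollector:

    # Simplified poll loop in the spirit of the wait_for_task/_poll_task lines above.
    import time

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info.state == "success":
                return info.result
            if info.state == "error":
                raise RuntimeError(f"task failed: {info.error}")
            # queued/running: log progress and poll again, as in the DEBUG lines above
            print(f"progress is {info.progress or 0}%")
            time.sleep(poll_interval)
        raise TimeoutError("task did not complete in time")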
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.284520] env[62522]: DEBUG nova.compute.manager [req-3812ad56-cfea-4bb2-9d13-1d9a6b0bd686 req-a4d92ad5-5a63-4dd2-b156-cbd3a5aacea6 service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Received event network-changed-0ac91806-75b5-459d-8243-019320a7daf0 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1047.284733] env[62522]: DEBUG nova.compute.manager [req-3812ad56-cfea-4bb2-9d13-1d9a6b0bd686 req-a4d92ad5-5a63-4dd2-b156-cbd3a5aacea6 service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Refreshing instance network info cache due to event network-changed-0ac91806-75b5-459d-8243-019320a7daf0. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1047.284950] env[62522]: DEBUG oslo_concurrency.lockutils [req-3812ad56-cfea-4bb2-9d13-1d9a6b0bd686 req-a4d92ad5-5a63-4dd2-b156-cbd3a5aacea6 service nova] Acquiring lock "refresh_cache-fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1047.285105] env[62522]: DEBUG oslo_concurrency.lockutils [req-3812ad56-cfea-4bb2-9d13-1d9a6b0bd686 req-a4d92ad5-5a63-4dd2-b156-cbd3a5aacea6 service nova] Acquired lock "refresh_cache-fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.285267] env[62522]: DEBUG nova.network.neutron [req-3812ad56-cfea-4bb2-9d13-1d9a6b0bd686 req-a4d92ad5-5a63-4dd2-b156-cbd3a5aacea6 service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Refreshing network info cache for port 0ac91806-75b5-459d-8243-019320a7daf0 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1047.339076] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3898a51f-bffe-4734-addb-a716c1eca0d7 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "3b2cd0b6-0c7a-411c-a7f5-64835f2179dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.692s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1047.471358] env[62522]: DEBUG nova.virt.hardware [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1047.471561] env[62522]: DEBUG nova.virt.hardware [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 
1047.471769] env[62522]: DEBUG nova.virt.hardware [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1047.472017] env[62522]: DEBUG nova.virt.hardware [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1047.472191] env[62522]: DEBUG nova.virt.hardware [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1047.472369] env[62522]: DEBUG nova.virt.hardware [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1047.472859] env[62522]: DEBUG nova.virt.hardware [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1047.473079] env[62522]: DEBUG nova.virt.hardware [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1047.473289] env[62522]: DEBUG nova.virt.hardware [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1047.473482] env[62522]: DEBUG nova.virt.hardware [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1047.473670] env[62522]: DEBUG nova.virt.hardware [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1047.479623] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Reconfiguring VM instance instance-00000056 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1047.479780] env[62522]: DEBUG oslo_vmware.api [None 
req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416016, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.482909] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9539aa0-4787-4a60-be75-ff7841ad7592 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.504217] env[62522]: DEBUG oslo_vmware.api [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1047.504217] env[62522]: value = "task-2416018" [ 1047.504217] env[62522]: _type = "Task" [ 1047.504217] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.516936] env[62522]: DEBUG oslo_vmware.api [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416018, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.628329] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ae26ba-4674-4e7d-840d-493258782dac {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.638660] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd65289-f329-4c06-91e3-3ff69adf9630 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.674324] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a39ad91-762e-459d-b3ef-eca97e184d1b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.685535] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d86d2878-575e-42f3-87ab-94ad742e32dc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.703916] env[62522]: DEBUG oslo_concurrency.lockutils [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Releasing lock "refresh_cache-04a9d357-d094-487b-8f09-2f7e0c35f0d7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1047.706375] env[62522]: DEBUG nova.compute.provider_tree [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1047.719350] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416017, 'name': ReconfigVM_Task, 'duration_secs': 0.180834} completed 
successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.719655] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1047.719916] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a6f18827-2adb-48f7-89c5-b513142544e5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.727751] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1047.727751] env[62522]: value = "task-2416019" [ 1047.727751] env[62522]: _type = "Task" [ 1047.727751] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.733088] env[62522]: DEBUG nova.virt.hardware [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='17ea371b8a718154d20eed5d4f47e7bb',container_format='bare',created_at=2025-02-10T12:25:16Z,direct_url=,disk_format='vmdk',id=f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1886771056-shelved',owner='ed393a0454b643eea75c203d1dfd592c',properties=ImageMetaProps,protected=,size=31591424,status='active',tags=,updated_at=2025-02-10T12:25:35Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1047.733325] env[62522]: DEBUG nova.virt.hardware [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1047.733486] env[62522]: DEBUG nova.virt.hardware [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1047.733670] env[62522]: DEBUG nova.virt.hardware [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1047.733836] env[62522]: DEBUG nova.virt.hardware [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1047.734007] env[62522]: DEBUG 
nova.virt.hardware [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1047.734225] env[62522]: DEBUG nova.virt.hardware [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1047.734388] env[62522]: DEBUG nova.virt.hardware [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1047.734573] env[62522]: DEBUG nova.virt.hardware [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1047.734737] env[62522]: DEBUG nova.virt.hardware [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1047.734949] env[62522]: DEBUG nova.virt.hardware [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1047.735972] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a90a10c0-c528-486f-a2e4-c986499c1a9b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.744754] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416019, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.747907] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ded819a-15f4-4c66-a0d9-1fe096a8621b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.765729] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:bf:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '09bf081b-cdf0-4977-abe2-2339a87409ab', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e36641e-fc4a-4223-ab07-33dc49821168', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1047.773308] env[62522]: DEBUG oslo.service.loopingcall [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1047.773626] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1047.773883] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d105544-bd01-4bad-824a-a190051e59c6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.799266] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1047.799266] env[62522]: value = "task-2416020" [ 1047.799266] env[62522]: _type = "Task" [ 1047.799266] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.807879] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416020, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.859572] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8370e1ec-25a0-4c3a-83e6-7f9ad8bfc73b tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1047.859749] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-070ff4b1-57f4-4a82-8aba-7e6d12c97c83 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.869223] env[62522]: DEBUG oslo_vmware.api [None req-8370e1ec-25a0-4c3a-83e6-7f9ad8bfc73b tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1047.869223] env[62522]: value = "task-2416021" [ 1047.869223] env[62522]: _type = "Task" [ 1047.869223] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.879039] env[62522]: DEBUG oslo_vmware.api [None req-8370e1ec-25a0-4c3a-83e6-7f9ad8bfc73b tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416021, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.970603] env[62522]: DEBUG oslo_vmware.api [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416016, 'name': ReconfigVM_Task, 'duration_secs': 0.743301} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.970603] env[62522]: DEBUG oslo_concurrency.lockutils [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1047.970603] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Reconfigured VM to attach interface {{(pid=62522) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1048.015939] env[62522]: DEBUG oslo_vmware.api [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416018, 'name': ReconfigVM_Task, 'duration_secs': 0.189693} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.017253] env[62522]: DEBUG nova.network.neutron [req-3812ad56-cfea-4bb2-9d13-1d9a6b0bd686 req-a4d92ad5-5a63-4dd2-b156-cbd3a5aacea6 service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Updated VIF entry in instance network info cache for port 0ac91806-75b5-459d-8243-019320a7daf0. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1048.017879] env[62522]: DEBUG nova.network.neutron [req-3812ad56-cfea-4bb2-9d13-1d9a6b0bd686 req-a4d92ad5-5a63-4dd2-b156-cbd3a5aacea6 service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Updating instance_info_cache with network_info: [{"id": "954fee91-36f2-497a-a856-6828a519a456", "address": "fa:16:3e:df:f4:48", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.136", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap954fee91-36", "ovs_interfaceid": "954fee91-36f2-497a-a856-6828a519a456", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0ac91806-75b5-459d-8243-019320a7daf0", "address": "fa:16:3e:d2:6b:0e", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ac91806-75", "ovs_interfaceid": "0ac91806-75b5-459d-8243-019320a7daf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.019201] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Reconfigured VM instance instance-00000056 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1048.021737] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-863adf5b-0380-471f-9a4d-dfbb79b58607 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1048.047834] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] cabe40a0-8bd0-4d77-b949-298bd194fa42/cabe40a0-8bd0-4d77-b949-298bd194fa42.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1048.049942] env[62522]: DEBUG nova.compute.manager [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1048.050249] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c71567b8-ce7a-483d-bc8a-a0150fae4dd4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.064686] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a104096-276b-4a6b-80a7-698cb2775457 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.069659] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "02708991-7f71-408e-89d8-932b845553d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.069899] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "02708991-7f71-408e-89d8-932b845553d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1048.070560] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "02708991-7f71-408e-89d8-932b845553d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.070560] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "02708991-7f71-408e-89d8-932b845553d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1048.070560] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "02708991-7f71-408e-89d8-932b845553d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.073238] env[62522]: INFO nova.compute.manager [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Terminating instance [ 1048.086304] env[62522]: DEBUG oslo_vmware.api [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1048.086304] env[62522]: value = "task-2416022" [ 1048.086304] env[62522]: _type = "Task" [ 1048.086304] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.099192] env[62522]: DEBUG oslo_vmware.api [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416022, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.214289] env[62522]: DEBUG nova.scheduler.client.report [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1048.239703] env[62522]: DEBUG oslo_vmware.api [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416019, 'name': PowerOnVM_Task, 'duration_secs': 0.466216} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.240144] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1048.245150] env[62522]: DEBUG nova.compute.manager [None req-2d5ef20c-f659-4fb2-bc67-bb23d36be461 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1048.246046] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e562571-a3f9-4659-910b-224c08a2306d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.315198] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416020, 'name': CreateVM_Task, 'duration_secs': 0.354385} completed successfully. 
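[note] The "Acquiring lock … / Lock … acquired … waited Ns / Lock … released … held Ns" triples in the records above come from oslo.concurrency's lockutils, which nova-compute uses to serialize operations per instance UUID and per shared resource. A minimal usage sketch of that locking pattern; the lock names below are invented for illustration:

    # Minimal example of the oslo.concurrency pattern behind the lock lines above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized("instance-00000000-0000-0000-0000-000000000000")
    def do_terminate_instance():
        # The body runs with the named lock held; lockutils logs the waited/held
        # durations at DEBUG level, as seen in this log.
        pass

    # Equivalent context-manager form, as used for "compute_resources" and the
    # "refresh_cache-<uuid>" locks:
    with lockutils.lock("compute_resources"):
        pass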
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.315468] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1048.316592] env[62522]: DEBUG oslo_concurrency.lockutils [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1048.316592] env[62522]: DEBUG oslo_concurrency.lockutils [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.317049] env[62522]: DEBUG oslo_concurrency.lockutils [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1048.317519] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff426672-f17a-4a17-b997-fdb8b530261e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.325969] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 1048.325969] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521aea9a-c7ef-bdc4-99cc-53c2c1e7bf71" [ 1048.325969] env[62522]: _type = "Task" [ 1048.325969] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.338125] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521aea9a-c7ef-bdc4-99cc-53c2c1e7bf71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.379502] env[62522]: DEBUG oslo_vmware.api [None req-8370e1ec-25a0-4c3a-83e6-7f9ad8bfc73b tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416021, 'name': PowerOffVM_Task, 'duration_secs': 0.187858} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.379502] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8370e1ec-25a0-4c3a-83e6-7f9ad8bfc73b tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1048.379746] env[62522]: DEBUG nova.compute.manager [None req-8370e1ec-25a0-4c3a-83e6-7f9ad8bfc73b tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1048.380449] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c59187-5f49-4bd0-826a-bee3471d84db {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.478609] env[62522]: DEBUG oslo_concurrency.lockutils [None req-be62457d-6c70-4534-935e-3f5fcaa5488c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "interface-fcd0eef6-d059-4495-a982-058b6c9626d1-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.785s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.525179] env[62522]: DEBUG oslo_concurrency.lockutils [req-3812ad56-cfea-4bb2-9d13-1d9a6b0bd686 req-a4d92ad5-5a63-4dd2-b156-cbd3a5aacea6 service nova] Releasing lock "refresh_cache-fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1048.577759] env[62522]: DEBUG nova.compute.manager [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1048.578063] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1048.579138] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cedce50b-eb8b-4184-bdab-53a2b76030d1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.590433] env[62522]: INFO nova.compute.manager [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] instance snapshotting [ 1048.592867] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1048.596708] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff62be1e-7e8e-45d0-8d78-a10aded558be {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.598869] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7acd33b5-18a8-40b0-bda8-c58516d6e0f2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.608026] env[62522]: DEBUG oslo_vmware.api [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416022, 'name': ReconfigVM_Task, 'duration_secs': 0.340184} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.623760] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Reconfigured VM instance instance-00000056 to attach disk [datastore2] cabe40a0-8bd0-4d77-b949-298bd194fa42/cabe40a0-8bd0-4d77-b949-298bd194fa42.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1048.624093] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Updating instance 'cabe40a0-8bd0-4d77-b949-298bd194fa42' progress to 50 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1048.627779] env[62522]: DEBUG oslo_vmware.api [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1048.627779] env[62522]: value = "task-2416023" [ 1048.627779] env[62522]: _type = "Task" [ 1048.627779] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.629310] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44773357-d533-4950-b905-502eeef4efdc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.644421] env[62522]: DEBUG oslo_vmware.api [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416023, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.720973] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.914s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.723151] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 8.642s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1048.723323] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.723479] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62522) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1048.723854] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.511s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1048.723968] env[62522]: DEBUG nova.objects.instance [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lazy-loading 'resources' on Instance uuid 4e9436df-c86b-429b-abc2-97f760858055 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1048.725840] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a00e5c5c-77f1-4a5c-9c1e-3a05dfd0d175 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.736194] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7ab6b08-3547-46bf-b478-71c4ecac602a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.752406] 
env[62522]: INFO nova.scheduler.client.report [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Deleted allocations for instance a10c4dee-4490-445a-bea2-9f8ef5425d15 [ 1048.753963] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-749b107d-25a0-4bb9-856a-fd3bc3575c3f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.768135] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1ab136-8bc9-495d-a887-c5f72b4afd62 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.803961] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178434MB free_disk=148GB free_vcpus=48 pci_devices=None {{(pid=62522) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1048.803961] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.837124] env[62522]: DEBUG oslo_concurrency.lockutils [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1048.837311] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Processing image f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1048.837495] env[62522]: DEBUG oslo_concurrency.lockutils [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8/f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1048.837650] env[62522]: DEBUG oslo_concurrency.lockutils [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8/f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.837827] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1048.838366] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-76a39ec8-ca59-4fc3-9906-6564f3df1421 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.849644] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1048.849821] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1048.850544] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0136cda-7f30-4118-a20e-e547894e433e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.857989] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 1048.857989] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526ddb01-0f6b-1359-f310-e8068f24e14e" [ 1048.857989] env[62522]: _type = "Task" [ 1048.857989] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.866211] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526ddb01-0f6b-1359-f310-e8068f24e14e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.893739] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8370e1ec-25a0-4c3a-83e6-7f9ad8bfc73b tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "3c4c395c-0625-4569-990d-e2d4ad162c14" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.054s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.047609] env[62522]: DEBUG nova.compute.manager [req-e595a338-0ab3-4d0e-93fd-ddf0879d5e6b req-387e19a7-12df-4d68-a5e4-a8dd970158ed service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Received event network-changed-7e36641e-fc4a-4223-ab07-33dc49821168 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1049.047815] env[62522]: DEBUG nova.compute.manager [req-e595a338-0ab3-4d0e-93fd-ddf0879d5e6b req-387e19a7-12df-4d68-a5e4-a8dd970158ed service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Refreshing instance network info cache due to event network-changed-7e36641e-fc4a-4223-ab07-33dc49821168. 
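[note] The "Received event network-changed-<port>" records above show Neutron pushing an external event to nova-compute, which then takes the per-instance "refresh_cache-<uuid>" lock and refreshes only the affected port in the cached network_info, ending in "Updated VIF entry in instance network info cache for port …". A toy sketch of that single-entry cache update, using plain dicts rather than Nova's NetworkInfo objects:

    # Toy version of "Updated VIF entry in instance network info cache for port ...":
    # replace the matching entry in a cached network_info list, keyed by port id.
    def update_vif_entry(cached_network_info, refreshed_vif):
        updated = False
        for i, vif in enumerate(cached_network_info):
            if vif["id"] == refreshed_vif["id"]:
                cached_network_info[i] = refreshed_vif
                updated = True
        if not updated:
            cached_network_info.append(refreshed_vif)
        return cached_network_info

    cache = [{"id": "0ac91806-75b5-459d-8243-019320a7daf0", "active": False}]
    update_vif_entry(cache, {"id": "0ac91806-75b5-459d-8243-019320a7daf0", "active": True})
    print(cache)  # the entry for the changed port is replaced in place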
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1049.048045] env[62522]: DEBUG oslo_concurrency.lockutils [req-e595a338-0ab3-4d0e-93fd-ddf0879d5e6b req-387e19a7-12df-4d68-a5e4-a8dd970158ed service nova] Acquiring lock "refresh_cache-04a9d357-d094-487b-8f09-2f7e0c35f0d7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1049.048194] env[62522]: DEBUG oslo_concurrency.lockutils [req-e595a338-0ab3-4d0e-93fd-ddf0879d5e6b req-387e19a7-12df-4d68-a5e4-a8dd970158ed service nova] Acquired lock "refresh_cache-04a9d357-d094-487b-8f09-2f7e0c35f0d7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.048357] env[62522]: DEBUG nova.network.neutron [req-e595a338-0ab3-4d0e-93fd-ddf0879d5e6b req-387e19a7-12df-4d68-a5e4-a8dd970158ed service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Refreshing network info cache for port 7e36641e-fc4a-4223-ab07-33dc49821168 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1049.131355] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aec3433d-6721-4193-b60c-a02859d143e7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.143426] env[62522]: DEBUG oslo_vmware.api [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416023, 'name': PowerOffVM_Task, 'duration_secs': 0.400164} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.156343] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1049.156543] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1049.157537] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Creating Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1049.157904] env[62522]: DEBUG nova.compute.manager [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1049.158240] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1efe04e2-6bd0-4711-b19d-6ab34519bbe3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.159808] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with 
opID=oslo.vmware-e6d962e9-ed68-477d-9ae4-42d10d5107dd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.161928] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b4c621-488e-41d5-8f45-3035ed5660fa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.164791] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c95bad1-1396-47e0-8de7-db61d1069e53 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.188401] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Updating instance 'cabe40a0-8bd0-4d77-b949-298bd194fa42' progress to 67 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1049.194127] env[62522]: DEBUG oslo_vmware.api [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1049.194127] env[62522]: value = "task-2416024" [ 1049.194127] env[62522]: _type = "Task" [ 1049.194127] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.204681] env[62522]: DEBUG oslo_vmware.api [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416024, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.258601] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1049.259012] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1049.259275] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Deleting the datastore file [datastore2] 02708991-7f71-408e-89d8-932b845553d1 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1049.259580] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d49a29fe-f585-4347-aa1f-1c79d1376bef {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.264992] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e96ad95d-4971-453b-a45a-e08147f0fdf5 tempest-ServersTestFqdnHostnames-1058698417 tempest-ServersTestFqdnHostnames-1058698417-project-member] Lock "a10c4dee-4490-445a-bea2-9f8ef5425d15" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.040s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.268816] env[62522]: DEBUG oslo_vmware.api [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1049.268816] env[62522]: value = "task-2416026" [ 1049.268816] env[62522]: _type = "Task" [ 1049.268816] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.283043] env[62522]: DEBUG oslo_vmware.api [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416026, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.371080] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Preparing fetch location {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1049.371080] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Fetch image to [datastore2] OSTACK_IMG_66ce1766-1263-42e8-8354-460eac23e01a/OSTACK_IMG_66ce1766-1263-42e8-8354-460eac23e01a.vmdk {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1049.371080] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Downloading stream optimized image f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8 to [datastore2] OSTACK_IMG_66ce1766-1263-42e8-8354-460eac23e01a/OSTACK_IMG_66ce1766-1263-42e8-8354-460eac23e01a.vmdk on the data store datastore2 as vApp {{(pid=62522) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1049.371080] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Downloading image file data f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8 to the ESX as VM named 'OSTACK_IMG_66ce1766-1263-42e8-8354-460eac23e01a' {{(pid=62522) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1049.457792] env[62522]: DEBUG oslo_vmware.rw_handles [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1049.457792] env[62522]: value = "resgroup-9" [ 1049.457792] env[62522]: _type = "ResourcePool" [ 1049.457792] env[62522]: }. {{(pid=62522) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1049.457792] env[62522]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-16d46cca-7e91-47fe-98da-729bfaccae61 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.485151] env[62522]: DEBUG oslo_vmware.rw_handles [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lease: (returnval){ [ 1049.485151] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bd0697-7052-adfc-d55c-f634302fbe33" [ 1049.485151] env[62522]: _type = "HttpNfcLease" [ 1049.485151] env[62522]: } obtained for vApp import into resource pool (val){ [ 1049.485151] env[62522]: value = "resgroup-9" [ 1049.485151] env[62522]: _type = "ResourcePool" [ 1049.485151] env[62522]: }. 
{{(pid=62522) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1049.485639] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the lease: (returnval){ [ 1049.485639] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bd0697-7052-adfc-d55c-f634302fbe33" [ 1049.485639] env[62522]: _type = "HttpNfcLease" [ 1049.485639] env[62522]: } to be ready. {{(pid=62522) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1049.497449] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1049.497449] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bd0697-7052-adfc-d55c-f634302fbe33" [ 1049.497449] env[62522]: _type = "HttpNfcLease" [ 1049.497449] env[62522]: } is initializing. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1049.600796] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9118f03a-bbf6-4f26-a652-19ae466c1a83 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.608847] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d80d814-2199-4954-8a18-a1f3f6a0e0cd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.645244] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1701f4d-2449-4a4c-b94f-57cdca3e5a2f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.654268] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ff8be30-7b03-41a4-a1bb-74d7c4fb92c3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.671167] env[62522]: DEBUG nova.compute.provider_tree [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1049.694904] env[62522]: INFO nova.compute.manager [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] instance snapshotting [ 1049.695581] env[62522]: DEBUG nova.objects.instance [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lazy-loading 'flavor' on Instance uuid 7f8a8270-5014-446c-aa42-ea0b4079e5a9 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1049.715863] 
env[62522]: DEBUG oslo_vmware.api [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416024, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.781338] env[62522]: DEBUG oslo_vmware.api [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416026, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.377132} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.785104] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1049.785431] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1049.785697] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1049.785962] env[62522]: INFO nova.compute.manager [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 02708991-7f71-408e-89d8-932b845553d1] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1049.786225] env[62522]: DEBUG oslo.service.loopingcall [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1049.786935] env[62522]: DEBUG nova.compute.manager [-] [instance: 02708991-7f71-408e-89d8-932b845553d1] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1049.787055] env[62522]: DEBUG nova.network.neutron [-] [instance: 02708991-7f71-408e-89d8-932b845553d1] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1049.820817] env[62522]: DEBUG nova.network.neutron [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Port 9e10cc19-76da-49d9-80b6-068ce128a1b0 binding to destination host cpu-1 is already ACTIVE {{(pid=62522) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1049.920027] env[62522]: INFO nova.compute.manager [None req-2b2184f1-5c23-4455-b71a-c1650ae72353 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Unrescuing [ 1049.920367] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2b2184f1-5c23-4455-b71a-c1650ae72353 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquiring lock "refresh_cache-5c9b1120-84ad-48d5-8cd4-0cf387963066" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1049.920607] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2b2184f1-5c23-4455-b71a-c1650ae72353 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquired lock "refresh_cache-5c9b1120-84ad-48d5-8cd4-0cf387963066" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.920855] env[62522]: DEBUG nova.network.neutron [None req-2b2184f1-5c23-4455-b71a-c1650ae72353 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1049.994754] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1049.994754] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bd0697-7052-adfc-d55c-f634302fbe33" [ 1049.994754] env[62522]: _type = "HttpNfcLease" [ 1049.994754] env[62522]: } is initializing. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1049.999654] env[62522]: DEBUG nova.network.neutron [req-e595a338-0ab3-4d0e-93fd-ddf0879d5e6b req-387e19a7-12df-4d68-a5e4-a8dd970158ed service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Updated VIF entry in instance network info cache for port 7e36641e-fc4a-4223-ab07-33dc49821168. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1050.001249] env[62522]: DEBUG nova.network.neutron [req-e595a338-0ab3-4d0e-93fd-ddf0879d5e6b req-387e19a7-12df-4d68-a5e4-a8dd970158ed service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Updating instance_info_cache with network_info: [{"id": "7e36641e-fc4a-4223-ab07-33dc49821168", "address": "fa:16:3e:f1:bf:49", "network": {"id": "b837f0fb-c2e1-46dd-93b2-62d6c4352316", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1813744063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed393a0454b643eea75c203d1dfd592c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e36641e-fc", "ovs_interfaceid": "7e36641e-fc4a-4223-ab07-33dc49821168", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.063823] env[62522]: DEBUG nova.objects.instance [None req-cd07e64a-2654-40a3-b169-83da16ecb6b8 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lazy-loading 'flavor' on Instance uuid 3c4c395c-0625-4569-990d-e2d4ad162c14 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1050.197192] env[62522]: ERROR nova.scheduler.client.report [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] [req-66f17404-a251-4c2e-a695-21850fd1c2f7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c7fa38b2-245d-4337-a012-22c1a01c0a72. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-66f17404-a251-4c2e-a695-21850fd1c2f7"}]} [ 1050.205926] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da3ce26-c595-422d-a3ea-6c9725f2873d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.212538] env[62522]: DEBUG oslo_vmware.api [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416024, 'name': CreateSnapshot_Task, 'duration_secs': 0.909203} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.213293] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Created Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1050.214168] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051a6e84-2759-4179-ba96-465ab8b04bca {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.235797] env[62522]: DEBUG nova.scheduler.client.report [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Refreshing inventories for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1050.239098] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f892fad-92b9-43f1-8d60-6d1297690a64 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.268472] env[62522]: DEBUG nova.scheduler.client.report [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Updating ProviderTree inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1050.268753] env[62522]: DEBUG nova.compute.provider_tree [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1050.286801] env[62522]: DEBUG nova.scheduler.client.report [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Refreshing aggregate associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, aggregates: None {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1050.310142] env[62522]: DEBUG nova.scheduler.client.report [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 
tempest-ListServersNegativeTestJSON-2140492499-project-member] Refreshing trait associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1050.495084] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1050.495084] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bd0697-7052-adfc-d55c-f634302fbe33" [ 1050.495084] env[62522]: _type = "HttpNfcLease" [ 1050.495084] env[62522]: } is ready. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1050.495432] env[62522]: DEBUG oslo_vmware.rw_handles [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1050.495432] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bd0697-7052-adfc-d55c-f634302fbe33" [ 1050.495432] env[62522]: _type = "HttpNfcLease" [ 1050.495432] env[62522]: }. {{(pid=62522) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1050.496286] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa4d646d-fa53-40b6-9f06-7b8415e14382 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.504947] env[62522]: DEBUG oslo_vmware.rw_handles [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5201d111-84d9-d3d3-f355-b92c26f27bab/disk-0.vmdk from lease info. {{(pid=62522) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1050.505211] env[62522]: DEBUG oslo_vmware.rw_handles [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Creating HTTP connection to write to file with size = 31591424 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5201d111-84d9-d3d3-f355-b92c26f27bab/disk-0.vmdk. 
{{(pid=62522) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1050.509642] env[62522]: DEBUG oslo_concurrency.lockutils [req-e595a338-0ab3-4d0e-93fd-ddf0879d5e6b req-387e19a7-12df-4d68-a5e4-a8dd970158ed service nova] Releasing lock "refresh_cache-04a9d357-d094-487b-8f09-2f7e0c35f0d7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1050.581770] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd07e64a-2654-40a3-b169-83da16ecb6b8 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "refresh_cache-3c4c395c-0625-4569-990d-e2d4ad162c14" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1050.582579] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd07e64a-2654-40a3-b169-83da16ecb6b8 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquired lock "refresh_cache-3c4c395c-0625-4569-990d-e2d4ad162c14" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.582579] env[62522]: DEBUG nova.network.neutron [None req-cd07e64a-2654-40a3-b169-83da16ecb6b8 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1050.582579] env[62522]: DEBUG nova.objects.instance [None req-cd07e64a-2654-40a3-b169-83da16ecb6b8 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lazy-loading 'info_cache' on Instance uuid 3c4c395c-0625-4569-990d-e2d4ad162c14 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1050.586933] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e97573ea-87b8-42ed-a830-e071bc60f0a6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.652077] env[62522]: DEBUG nova.network.neutron [-] [instance: 02708991-7f71-408e-89d8-932b845553d1] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.731932] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4114175c-faf4-4e71-8fcb-794bdee3b0cf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.736322] env[62522]: DEBUG nova.network.neutron [None req-2b2184f1-5c23-4455-b71a-c1650ae72353 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Updating instance_info_cache with network_info: [{"id": "78283962-9062-464a-b1f4-a2319257559b", "address": "fa:16:3e:6a:27:1b", "network": {"id": "4b04f6e1-0714-469b-9941-be6f5b6128d0", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-292476225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": 
false, "tenant_id": "e1ca5dfb8f2d4b2e932679e017fe8b3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78283962-90", "ovs_interfaceid": "78283962-9062-464a-b1f4-a2319257559b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.740903] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25bbd0e-f4e9-46bb-addc-716ead2f9cf9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.777113] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Creating linked-clone VM from snapshot {{(pid=62522) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1050.778052] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Creating Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1050.779077] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d1aad902-d116-4bbf-bf31-eb3c7d672773 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.782009] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-981fca2e-fdfd-4f63-9d21-565b46e59730 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.789019] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edadaf04-05b9-47bb-9b5d-a3b59b529a7d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.797734] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d380bc-de63-47df-8b8c-1c75f8f23c7d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.807028] env[62522]: DEBUG oslo_vmware.api [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1050.807028] env[62522]: value = "task-2416028" [ 1050.807028] env[62522]: _type = "Task" [ 1050.807028] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.807028] env[62522]: DEBUG oslo_vmware.api [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1050.807028] env[62522]: value = "task-2416029" [ 1050.807028] env[62522]: _type = "Task" [ 1050.807028] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.819858] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "interface-fcd0eef6-d059-4495-a982-058b6c9626d1-fa421858-7ef8-4e24-94ec-cb1477a79f22" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1050.820152] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "interface-fcd0eef6-d059-4495-a982-058b6c9626d1-fa421858-7ef8-4e24-94ec-cb1477a79f22" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1050.820460] env[62522]: DEBUG nova.objects.instance [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lazy-loading 'flavor' on Instance uuid fcd0eef6-d059-4495-a982-058b6c9626d1 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1050.823261] env[62522]: DEBUG nova.compute.provider_tree [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1050.829118] env[62522]: DEBUG oslo_vmware.api [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416029, 'name': CloneVM_Task} progress is 11%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.840083] env[62522]: DEBUG oslo_vmware.api [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416028, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.846902] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "cabe40a0-8bd0-4d77-b949-298bd194fa42-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1050.847581] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "cabe40a0-8bd0-4d77-b949-298bd194fa42-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1050.847581] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "cabe40a0-8bd0-4d77-b949-298bd194fa42-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.086360] env[62522]: DEBUG oslo_concurrency.lockutils [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Acquiring lock "9141ffdd-cbfa-4efe-a01b-dc1326af474c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.086360] env[62522]: DEBUG oslo_concurrency.lockutils [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Lock "9141ffdd-cbfa-4efe-a01b-dc1326af474c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.086360] env[62522]: DEBUG oslo_concurrency.lockutils [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Acquiring lock "9141ffdd-cbfa-4efe-a01b-dc1326af474c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.086360] env[62522]: DEBUG oslo_concurrency.lockutils [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Lock "9141ffdd-cbfa-4efe-a01b-dc1326af474c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.086360] env[62522]: DEBUG oslo_concurrency.lockutils [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e 
tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Lock "9141ffdd-cbfa-4efe-a01b-dc1326af474c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.088957] env[62522]: DEBUG nova.objects.base [None req-cd07e64a-2654-40a3-b169-83da16ecb6b8 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Object Instance<3c4c395c-0625-4569-990d-e2d4ad162c14> lazy-loaded attributes: flavor,info_cache {{(pid=62522) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1051.090395] env[62522]: INFO nova.compute.manager [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Terminating instance [ 1051.155287] env[62522]: INFO nova.compute.manager [-] [instance: 02708991-7f71-408e-89d8-932b845553d1] Took 1.37 seconds to deallocate network for instance. [ 1051.240247] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2b2184f1-5c23-4455-b71a-c1650ae72353 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Releasing lock "refresh_cache-5c9b1120-84ad-48d5-8cd4-0cf387963066" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1051.240626] env[62522]: DEBUG nova.objects.instance [None req-2b2184f1-5c23-4455-b71a-c1650ae72353 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Lazy-loading 'flavor' on Instance uuid 5c9b1120-84ad-48d5-8cd4-0cf387963066 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1051.268184] env[62522]: DEBUG nova.compute.manager [req-6d4a07f8-727b-4824-a8cf-337d984ed654 req-0eecc8ac-713a-4637-8185-4febdb0f3cce service nova] [instance: 02708991-7f71-408e-89d8-932b845553d1] Received event network-vif-deleted-cb27129b-6e1b-4340-8b38-2b33a9c6c83c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1051.327438] env[62522]: DEBUG oslo_vmware.api [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416029, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.337063] env[62522]: DEBUG oslo_vmware.api [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416028, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.382131] env[62522]: DEBUG nova.scheduler.client.report [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Updated inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with generation 123 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1051.382131] env[62522]: DEBUG nova.compute.provider_tree [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Updating resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 generation from 123 to 124 during operation: update_inventory {{(pid=62522) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1051.382131] env[62522]: DEBUG nova.compute.provider_tree [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1051.538745] env[62522]: DEBUG nova.objects.instance [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lazy-loading 'pci_requests' on Instance uuid fcd0eef6-d059-4495-a982-058b6c9626d1 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1051.597820] env[62522]: DEBUG nova.compute.manager [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1051.597820] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1051.598742] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5733ff9-0eff-4d78-a9f1-b29bd641b017 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.607522] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1051.610431] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5891c958-2a42-4a90-983c-b262db034da0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.619122] env[62522]: DEBUG oslo_vmware.api [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for the task: (returnval){ [ 1051.619122] env[62522]: value = "task-2416030" [ 1051.619122] env[62522]: _type = "Task" [ 1051.619122] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.633009] env[62522]: DEBUG oslo_vmware.api [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2416030, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.664442] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.750348] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e4ba29e-b4e8-4747-92a0-b42d5c33bea3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.773359] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b2184f1-5c23-4455-b71a-c1650ae72353 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1051.777904] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c50cdfb2-9054-48a9-bccb-a1a1dfbaefa8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.786708] env[62522]: DEBUG oslo_vmware.api [None req-2b2184f1-5c23-4455-b71a-c1650ae72353 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1051.786708] env[62522]: value = "task-2416031" [ 1051.786708] env[62522]: _type = "Task" [ 1051.786708] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.800402] env[62522]: DEBUG oslo_vmware.api [None req-2b2184f1-5c23-4455-b71a-c1650ae72353 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416031, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.820309] env[62522]: DEBUG oslo_vmware.api [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416028, 'name': CreateSnapshot_Task, 'duration_secs': 0.556839} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.824086] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Created Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1051.824086] env[62522]: DEBUG oslo_vmware.api [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416029, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.827881] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-978ea6bd-6803-43c5-98be-246311346e50 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.889074] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.165s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.894173] env[62522]: DEBUG oslo_concurrency.lockutils [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.660s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.894446] env[62522]: DEBUG nova.objects.instance [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lazy-loading 'resources' on Instance uuid 97f4c6ab-04de-4069-8ce0-1509c30ffb0f {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1051.915986] env[62522]: INFO nova.scheduler.client.report [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Deleted allocations for instance 4e9436df-c86b-429b-abc2-97f760858055 [ 1051.921395] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "refresh_cache-cabe40a0-8bd0-4d77-b949-298bd194fa42" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1051.922508] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired lock "refresh_cache-cabe40a0-8bd0-4d77-b949-298bd194fa42" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.922508] env[62522]: DEBUG nova.network.neutron [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1052.045770] env[62522]: DEBUG nova.objects.base [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62522) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1052.045770] env[62522]: DEBUG nova.network.neutron [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 
tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1052.080345] env[62522]: DEBUG nova.network.neutron [None req-cd07e64a-2654-40a3-b169-83da16ecb6b8 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Updating instance_info_cache with network_info: [{"id": "1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f", "address": "fa:16:3e:41:5d:d8", "network": {"id": "c3450427-ea7e-4a07-8399-53265d390e06", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1613138323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.174", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "686854cd52ce4809a4d315275260da54", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c42bb08a-77b4-4bba-8166-702cbb1b5f1e", "external-id": "nsx-vlan-transportzone-137", "segmentation_id": 137, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b7d6d1b-0d", "ovs_interfaceid": "1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.133590] env[62522]: DEBUG oslo_vmware.api [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2416030, 'name': PowerOffVM_Task, 'duration_secs': 0.464426} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.133906] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1052.134117] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1052.134406] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fdffa142-0bca-4dc9-b080-2858c98706c5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.137408] env[62522]: DEBUG nova.policy [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab5e5a8e6ee64aad8d52342ee3f5af36', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4bdd1f5caf09454d808bcdc15df2d3a7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1052.204432] env[62522]: DEBUG oslo_vmware.rw_handles [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Completed reading data from the image iterator. {{(pid=62522) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1052.204810] env[62522]: DEBUG oslo_vmware.rw_handles [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5201d111-84d9-d3d3-f355-b92c26f27bab/disk-0.vmdk. 
{{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1052.206192] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80bbb9fa-53f0-4f2e-943c-15069cfb2f33 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.216473] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1052.216764] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1052.216973] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Deleting the datastore file [datastore1] 9141ffdd-cbfa-4efe-a01b-dc1326af474c {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1052.217301] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f5bee1c-f7d9-4d50-b11f-9dcdc7ab63db {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.221357] env[62522]: DEBUG oslo_vmware.rw_handles [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5201d111-84d9-d3d3-f355-b92c26f27bab/disk-0.vmdk is in state: ready. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1052.221657] env[62522]: DEBUG oslo_vmware.rw_handles [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5201d111-84d9-d3d3-f355-b92c26f27bab/disk-0.vmdk. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1052.222568] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-21baa806-3242-47fe-9912-3d5555418658 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.227898] env[62522]: DEBUG oslo_vmware.api [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for the task: (returnval){ [ 1052.227898] env[62522]: value = "task-2416033" [ 1052.227898] env[62522]: _type = "Task" [ 1052.227898] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.237375] env[62522]: DEBUG oslo_vmware.api [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2416033, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.299312] env[62522]: DEBUG oslo_vmware.api [None req-2b2184f1-5c23-4455-b71a-c1650ae72353 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416031, 'name': PowerOffVM_Task, 'duration_secs': 0.315904} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.299738] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b2184f1-5c23-4455-b71a-c1650ae72353 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1052.305100] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b2184f1-5c23-4455-b71a-c1650ae72353 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Reconfiguring VM instance instance-00000058 to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1052.305405] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d321c790-4833-4a66-abe6-6435139af30a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.328237] env[62522]: DEBUG oslo_vmware.api [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416029, 'name': CloneVM_Task, 'duration_secs': 1.364932} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.329644] env[62522]: INFO nova.virt.vmwareapi.vmops [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Created linked-clone VM from snapshot [ 1052.329980] env[62522]: DEBUG oslo_vmware.api [None req-2b2184f1-5c23-4455-b71a-c1650ae72353 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1052.329980] env[62522]: value = "task-2416034" [ 1052.329980] env[62522]: _type = "Task" [ 1052.329980] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.330690] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e10ace8e-a65c-4fa3-9c56-a5b1ddd78c28 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.350754] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Creating linked-clone VM from snapshot {{(pid=62522) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1052.351137] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Uploading image 87ddd106-4ae3-4029-a2ef-7e054bf22bab {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1052.353209] env[62522]: DEBUG oslo_vmware.api [None req-2b2184f1-5c23-4455-b71a-c1650ae72353 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416034, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.353647] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ab9f5565-606d-401b-8af8-e3e25233bacc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.362015] env[62522]: DEBUG oslo_vmware.api [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1052.362015] env[62522]: value = "task-2416035" [ 1052.362015] env[62522]: _type = "Task" [ 1052.362015] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.366673] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Destroying the VM {{(pid=62522) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1052.366950] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-cb30a098-bfaa-4455-be8b-aa8565cb9ba3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.375482] env[62522]: DEBUG oslo_vmware.api [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416035, 'name': CloneVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.377048] env[62522]: DEBUG oslo_vmware.api [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1052.377048] env[62522]: value = "task-2416036" [ 1052.377048] env[62522]: _type = "Task" [ 1052.377048] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.387666] env[62522]: DEBUG oslo_vmware.api [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416036, 'name': Destroy_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.432764] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8beab9b1-c696-4f91-adb0-62a484a27da7 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "4e9436df-c86b-429b-abc2-97f760858055" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.738s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1052.525972] env[62522]: DEBUG oslo_vmware.rw_handles [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5201d111-84d9-d3d3-f355-b92c26f27bab/disk-0.vmdk. {{(pid=62522) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1052.528160] env[62522]: INFO nova.virt.vmwareapi.images [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Downloaded image file data f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8 [ 1052.528160] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a93e550-c325-4473-9dde-9e14d5b2ebaa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.556219] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-562e9898-4c3d-4f1e-b074-1dbc94acbecb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.585017] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd07e64a-2654-40a3-b169-83da16ecb6b8 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Releasing lock "refresh_cache-3c4c395c-0625-4569-990d-e2d4ad162c14" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1052.588895] env[62522]: INFO nova.virt.vmwareapi.images [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] The imported VM was unregistered [ 1052.592069] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Caching image {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1052.592367] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Creating directory with path [datastore2] devstack-image-cache_base/f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8 
{{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1052.595592] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-889f00ed-3a04-430e-8ffb-2d0c013195d2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.628615] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Created directory with path [datastore2] devstack-image-cache_base/f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8 {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1052.628841] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_66ce1766-1263-42e8-8354-460eac23e01a/OSTACK_IMG_66ce1766-1263-42e8-8354-460eac23e01a.vmdk to [datastore2] devstack-image-cache_base/f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8/f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8.vmdk. {{(pid=62522) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1052.629070] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-4efd5560-1204-4bb2-b1a1-6c109d9d6907 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.639763] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 1052.639763] env[62522]: value = "task-2416038" [ 1052.639763] env[62522]: _type = "Task" [ 1052.639763] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.651261] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416038, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.748110] env[62522]: DEBUG oslo_vmware.api [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Task: {'id': task-2416033, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.362442} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.748110] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1052.748110] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1052.748110] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1052.748110] env[62522]: INFO nova.compute.manager [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1052.748367] env[62522]: DEBUG oslo.service.loopingcall [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1052.749522] env[62522]: DEBUG nova.compute.manager [-] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1052.749522] env[62522]: DEBUG nova.network.neutron [-] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1052.756630] env[62522]: DEBUG nova.network.neutron [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Updating instance_info_cache with network_info: [{"id": "9e10cc19-76da-49d9-80b6-068ce128a1b0", "address": "fa:16:3e:3f:35:de", "network": {"id": "70e81afa-0eda-49c5-b072-e79b3c287468", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1715169983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff5da278d2be4ca983424c8291beadec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e10cc19-76", "ovs_interfaceid": "9e10cc19-76da-49d9-80b6-068ce128a1b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.779762] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8577a87-1f80-4c64-8d1b-158d7e7ea139 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.788385] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11f95ffd-bd23-4fad-8571-3783572f9de8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.821741] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-028be178-18b2-4cb1-aee8-d48ac1406da1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.830179] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29cebc0d-e0f8-4c0c-9522-1fcb355ef1dd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.847055] env[62522]: DEBUG nova.compute.provider_tree [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1052.851960] env[62522]: DEBUG oslo_vmware.api [None req-2b2184f1-5c23-4455-b71a-c1650ae72353 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416034, 'name': ReconfigVM_Task, 'duration_secs': 0.34184} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.852249] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b2184f1-5c23-4455-b71a-c1650ae72353 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Reconfigured VM instance instance-00000058 to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1052.852438] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b2184f1-5c23-4455-b71a-c1650ae72353 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1052.852714] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b9959cef-30c7-49a4-9b25-cb6f1640571b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.863287] env[62522]: DEBUG oslo_vmware.api [None req-2b2184f1-5c23-4455-b71a-c1650ae72353 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1052.863287] env[62522]: value = "task-2416039" [ 1052.863287] env[62522]: _type = "Task" [ 1052.863287] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.874856] env[62522]: DEBUG oslo_vmware.api [None req-2b2184f1-5c23-4455-b71a-c1650ae72353 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416039, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.878655] env[62522]: DEBUG oslo_vmware.api [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416035, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.887748] env[62522]: DEBUG oslo_vmware.api [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416036, 'name': Destroy_Task} progress is 33%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.153310] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416038, 'name': MoveVirtualDisk_Task} progress is 9%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.264556] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Releasing lock "refresh_cache-cabe40a0-8bd0-4d77-b949-298bd194fa42" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.350227] env[62522]: DEBUG nova.scheduler.client.report [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1053.383263] env[62522]: DEBUG oslo_vmware.api [None req-2b2184f1-5c23-4455-b71a-c1650ae72353 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416039, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.383529] env[62522]: DEBUG oslo_vmware.api [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416035, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.394555] env[62522]: DEBUG oslo_vmware.api [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416036, 'name': Destroy_Task, 'duration_secs': 0.649045} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.394847] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Destroyed the VM [ 1053.395107] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Deleting Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1053.395386] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0aebd33d-befe-4cfc-b3cf-eaaa4ea416cc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.404400] env[62522]: DEBUG oslo_vmware.api [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1053.404400] env[62522]: value = "task-2416040" [ 1053.404400] env[62522]: _type = "Task" [ 1053.404400] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.414810] env[62522]: DEBUG oslo_vmware.api [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416040, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.590852] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd07e64a-2654-40a3-b169-83da16ecb6b8 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1053.591311] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb787358-f4f2-49bf-82e3-533db0d28acb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.600693] env[62522]: DEBUG oslo_vmware.api [None req-cd07e64a-2654-40a3-b169-83da16ecb6b8 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1053.600693] env[62522]: value = "task-2416041" [ 1053.600693] env[62522]: _type = "Task" [ 1053.600693] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.611052] env[62522]: DEBUG oslo_vmware.api [None req-cd07e64a-2654-40a3-b169-83da16ecb6b8 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416041, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.653100] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416038, 'name': MoveVirtualDisk_Task} progress is 29%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.793694] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9500cd-0f86-49c7-ac27-2e3268137555 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.818720] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52382a0f-3244-4095-ba18-2bb2f810b637 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.827711] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Updating instance 'cabe40a0-8bd0-4d77-b949-298bd194fa42' progress to 83 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1053.852444] env[62522]: DEBUG nova.network.neutron [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Successfully updated port: fa421858-7ef8-4e24-94ec-cb1477a79f22 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1053.857277] env[62522]: DEBUG oslo_concurrency.lockutils [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.963s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.860391] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 5.057s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.888465] env[62522]: DEBUG oslo_vmware.api [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416035, 'name': CloneVM_Task} progress is 95%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.888844] env[62522]: DEBUG oslo_vmware.api [None req-2b2184f1-5c23-4455-b71a-c1650ae72353 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416039, 'name': PowerOnVM_Task, 'duration_secs': 0.662073} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.890384] env[62522]: INFO nova.scheduler.client.report [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Deleted allocations for instance 97f4c6ab-04de-4069-8ce0-1509c30ffb0f [ 1053.894448] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b2184f1-5c23-4455-b71a-c1650ae72353 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1053.894448] env[62522]: DEBUG nova.compute.manager [None req-2b2184f1-5c23-4455-b71a-c1650ae72353 tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1053.895985] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ada981-f860-4f48-8ec8-35a04204193e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.919866] env[62522]: DEBUG nova.compute.manager [req-9ba92139-4052-47c9-ade2-c0365595ea30 req-c26af136-6ff0-47e8-97e4-feffc9be8b09 service nova] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Received event network-vif-deleted-40bd7b1c-a8fa-4e59-802e-a8392e0d30eb {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1053.920116] env[62522]: INFO nova.compute.manager [req-9ba92139-4052-47c9-ade2-c0365595ea30 req-c26af136-6ff0-47e8-97e4-feffc9be8b09 service nova] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Neutron deleted interface 40bd7b1c-a8fa-4e59-802e-a8392e0d30eb; detaching it from the instance and deleting it from the info cache [ 1053.920304] env[62522]: DEBUG nova.network.neutron [req-9ba92139-4052-47c9-ade2-c0365595ea30 req-c26af136-6ff0-47e8-97e4-feffc9be8b09 service nova] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.932214] env[62522]: DEBUG oslo_vmware.api [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416040, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.942107] env[62522]: DEBUG nova.compute.manager [req-ab032697-af9f-4988-be2c-c34204d7cd60 req-22f084e8-d506-4be2-9177-6812ddcfb6bc service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Received event network-vif-plugged-fa421858-7ef8-4e24-94ec-cb1477a79f22 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1053.942327] env[62522]: DEBUG oslo_concurrency.lockutils [req-ab032697-af9f-4988-be2c-c34204d7cd60 req-22f084e8-d506-4be2-9177-6812ddcfb6bc service nova] Acquiring lock "fcd0eef6-d059-4495-a982-058b6c9626d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1053.942581] env[62522]: DEBUG oslo_concurrency.lockutils [req-ab032697-af9f-4988-be2c-c34204d7cd60 req-22f084e8-d506-4be2-9177-6812ddcfb6bc service nova] Lock "fcd0eef6-d059-4495-a982-058b6c9626d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.942875] env[62522]: DEBUG oslo_concurrency.lockutils [req-ab032697-af9f-4988-be2c-c34204d7cd60 req-22f084e8-d506-4be2-9177-6812ddcfb6bc service nova] Lock "fcd0eef6-d059-4495-a982-058b6c9626d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.943494] env[62522]: DEBUG nova.compute.manager [req-ab032697-af9f-4988-be2c-c34204d7cd60 req-22f084e8-d506-4be2-9177-6812ddcfb6bc service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] No waiting events found dispatching network-vif-plugged-fa421858-7ef8-4e24-94ec-cb1477a79f22 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1053.943729] env[62522]: WARNING nova.compute.manager [req-ab032697-af9f-4988-be2c-c34204d7cd60 req-22f084e8-d506-4be2-9177-6812ddcfb6bc service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Received unexpected event network-vif-plugged-fa421858-7ef8-4e24-94ec-cb1477a79f22 for instance with vm_state active and task_state None. [ 1054.112350] env[62522]: DEBUG oslo_vmware.api [None req-cd07e64a-2654-40a3-b169-83da16ecb6b8 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416041, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.151980] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416038, 'name': MoveVirtualDisk_Task} progress is 49%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.198552] env[62522]: DEBUG nova.network.neutron [-] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1054.334890] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1054.335283] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6259f60c-2a36-4cbc-87a6-6623c4fbf358 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.349319] env[62522]: DEBUG oslo_vmware.api [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1054.349319] env[62522]: value = "task-2416042" [ 1054.349319] env[62522]: _type = "Task" [ 1054.349319] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.355348] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "refresh_cache-fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1054.355540] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "refresh_cache-fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.355724] env[62522]: DEBUG nova.network.neutron [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1054.360635] env[62522]: DEBUG oslo_vmware.api [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416042, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.385163] env[62522]: DEBUG oslo_vmware.api [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416035, 'name': CloneVM_Task, 'duration_secs': 1.708423} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.385809] env[62522]: INFO nova.virt.vmwareapi.vmops [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Created linked-clone VM from snapshot [ 1054.386658] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d50ac68-4f4a-41ae-a538-24ab47451b2d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.398267] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Uploading image 0f0df71d-7d6e-452b-9dfd-236a14f4f7a2 {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1054.405610] env[62522]: DEBUG oslo_concurrency.lockutils [None req-316d9982-e8d7-48cb-81d7-bf34796dce84 tempest-ListServersNegativeTestJSON-2140492499 tempest-ListServersNegativeTestJSON-2140492499-project-member] Lock "97f4c6ab-04de-4069-8ce0-1509c30ffb0f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.648s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1054.420454] env[62522]: DEBUG oslo_vmware.api [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416040, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.425982] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f85fac7b-8b40-47b8-a353-885a7751e618 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.435782] env[62522]: DEBUG oslo_vmware.rw_handles [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1054.435782] env[62522]: value = "vm-489804" [ 1054.435782] env[62522]: _type = "VirtualMachine" [ 1054.435782] env[62522]: }. 
{{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1054.436387] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-5fe7f9b1-12c5-4742-b057-6736fcb94bd4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.444334] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c96503d-79eb-4026-b52a-324d7b5d842b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.462377] env[62522]: DEBUG oslo_vmware.rw_handles [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lease: (returnval){ [ 1054.462377] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52be4e91-3290-85b9-3c26-ed545aab38c8" [ 1054.462377] env[62522]: _type = "HttpNfcLease" [ 1054.462377] env[62522]: } obtained for exporting VM: (result){ [ 1054.462377] env[62522]: value = "vm-489804" [ 1054.462377] env[62522]: _type = "VirtualMachine" [ 1054.462377] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1054.462898] env[62522]: DEBUG oslo_vmware.api [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the lease: (returnval){ [ 1054.462898] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52be4e91-3290-85b9-3c26-ed545aab38c8" [ 1054.462898] env[62522]: _type = "HttpNfcLease" [ 1054.462898] env[62522]: } to be ready. {{(pid=62522) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1054.473391] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1054.473391] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52be4e91-3290-85b9-3c26-ed545aab38c8" [ 1054.473391] env[62522]: _type = "HttpNfcLease" [ 1054.473391] env[62522]: } is ready. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1054.473738] env[62522]: DEBUG oslo_vmware.rw_handles [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1054.473738] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52be4e91-3290-85b9-3c26-ed545aab38c8" [ 1054.473738] env[62522]: _type = "HttpNfcLease" [ 1054.473738] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1054.474637] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b20ef48f-727a-417d-8fd8-79d65e722f32 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.494877] env[62522]: DEBUG nova.compute.manager [req-9ba92139-4052-47c9-ade2-c0365595ea30 req-c26af136-6ff0-47e8-97e4-feffc9be8b09 service nova] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Detach interface failed, port_id=40bd7b1c-a8fa-4e59-802e-a8392e0d30eb, reason: Instance 9141ffdd-cbfa-4efe-a01b-dc1326af474c could not be found. 
{{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1054.498750] env[62522]: DEBUG oslo_vmware.rw_handles [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525eeb25-8cc9-e1d1-ddf4-a1b9d04c64d6/disk-0.vmdk from lease info. {{(pid=62522) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1054.499990] env[62522]: DEBUG oslo_vmware.rw_handles [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525eeb25-8cc9-e1d1-ddf4-a1b9d04c64d6/disk-0.vmdk for reading. {{(pid=62522) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1054.616035] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-10243c22-2a20-4e9a-8140-a387e37c717c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.617015] env[62522]: DEBUG oslo_vmware.api [None req-cd07e64a-2654-40a3-b169-83da16ecb6b8 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416041, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.660831] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416038, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.703025] env[62522]: INFO nova.compute.manager [-] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Took 1.95 seconds to deallocate network for instance. [ 1054.867281] env[62522]: DEBUG oslo_vmware.api [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416042, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.879284] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Applying migration context for instance cabe40a0-8bd0-4d77-b949-298bd194fa42 as it has an incoming, in-progress migration 31f1ffb8-0f4c-4352-bb6b-cf4a60fb08b0. Migration status is post-migrating {{(pid=62522) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1054.881425] env[62522]: INFO nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Updating resource usage from migration 31f1ffb8-0f4c-4352-bb6b-cf4a60fb08b0 [ 1054.907689] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance c181ce48-9fe2-4400-9047-f8b5a7159dd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1054.907848] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance cd69a052-369b-4809-baf0-a1aec44f4ab5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1054.907997] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance ebca687d-4de7-4fd6-99fb-b4f0154abe9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1054.908138] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 3c4c395c-0625-4569-990d-e2d4ad162c14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1054.908254] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance bf44e269-0297-473e-b6ce-04a40d0ec1b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1054.908404] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 9141ffdd-cbfa-4efe-a01b-dc1326af474c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1054.908644] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance c28d2907-5b59-4df8-91a8-4ba0f2047d89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1054.908822] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1054.908971] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance fcd0eef6-d059-4495-a982-058b6c9626d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1054.909108] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 8539afc0-1753-4c37-9fc9-25ec97b97243 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1054.909224] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 7f8a8270-5014-446c-aa42-ea0b4079e5a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1054.909335] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 548364e9-b19a-4777-8e62-19b8a0594f36 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1054.909487] env[62522]: WARNING nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 02708991-7f71-408e-89d8-932b845553d1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1054.909616] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 5c9b1120-84ad-48d5-8cd4-0cf387963066 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1054.909816] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 1a5a235a-477f-4da5-b5c1-ee057211cce8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1054.910014] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Migration 31f1ffb8-0f4c-4352-bb6b-cf4a60fb08b0 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1054.910746] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance cabe40a0-8bd0-4d77-b949-298bd194fa42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1054.910945] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 04a9d357-d094-487b-8f09-2f7e0c35f0d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1054.911130] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1054.911294] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3840MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1054.925190] env[62522]: WARNING nova.network.neutron [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] be0fe686-4986-439e-aa82-5cbe54104c8a already exists in list: networks containing: ['be0fe686-4986-439e-aa82-5cbe54104c8a']. ignoring it [ 1054.925408] env[62522]: WARNING nova.network.neutron [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] be0fe686-4986-439e-aa82-5cbe54104c8a already exists in list: networks containing: ['be0fe686-4986-439e-aa82-5cbe54104c8a']. ignoring it [ 1054.937478] env[62522]: DEBUG oslo_vmware.api [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416040, 'name': RemoveSnapshot_Task, 'duration_secs': 1.162891} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.941907] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Deleted Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1055.118956] env[62522]: DEBUG oslo_vmware.api [None req-cd07e64a-2654-40a3-b169-83da16ecb6b8 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416041, 'name': PowerOnVM_Task} progress is 71%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.154372] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416038, 'name': MoveVirtualDisk_Task} progress is 88%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.208398] env[62522]: DEBUG oslo_concurrency.lockutils [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1055.234333] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff723f0-91aa-43a3-a139-de448d90f958 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.244171] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae6d52a-870e-4226-8d8d-87f73ad10f7b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.284115] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ccf6e50-65e2-44ac-b4d3-a6976ab91335 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.295024] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-606edf2e-33b9-4c6c-9a5a-39c040fb32c7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.310059] env[62522]: DEBUG nova.compute.provider_tree [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1055.349473] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquiring lock "5c9b1120-84ad-48d5-8cd4-0cf387963066" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1055.349745] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Lock "5c9b1120-84ad-48d5-8cd4-0cf387963066" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.349979] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquiring lock "5c9b1120-84ad-48d5-8cd4-0cf387963066-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1055.350236] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Lock "5c9b1120-84ad-48d5-8cd4-0cf387963066-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.350435] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Lock "5c9b1120-84ad-48d5-8cd4-0cf387963066-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.356648] env[62522]: INFO nova.compute.manager [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Terminating instance [ 1055.365729] env[62522]: DEBUG oslo_vmware.api [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416042, 'name': PowerOnVM_Task, 'duration_secs': 0.932591} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.366059] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1055.366272] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ab3ef289-fcf7-4d90-8ca6-39a4824280fc tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Updating instance 'cabe40a0-8bd0-4d77-b949-298bd194fa42' progress to 100 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1055.451509] env[62522]: WARNING nova.compute.manager [None req-210a8bb7-8817-4b7b-8372-c39bf9d22e8b tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Image not found during snapshot: nova.exception.ImageNotFound: Image 87ddd106-4ae3-4029-a2ef-7e054bf22bab could not be found. [ 1055.618340] env[62522]: DEBUG oslo_vmware.api [None req-cd07e64a-2654-40a3-b169-83da16ecb6b8 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416041, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.656195] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416038, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.936604} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.656922] env[62522]: INFO nova.virt.vmwareapi.ds_util [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_66ce1766-1263-42e8-8354-460eac23e01a/OSTACK_IMG_66ce1766-1263-42e8-8354-460eac23e01a.vmdk to [datastore2] devstack-image-cache_base/f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8/f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8.vmdk. [ 1055.656922] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Cleaning up location [datastore2] OSTACK_IMG_66ce1766-1263-42e8-8354-460eac23e01a {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1055.657097] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_66ce1766-1263-42e8-8354-460eac23e01a {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1055.658721] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-46a3ae7c-8742-45a5-9528-12dd033d72a9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.666682] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 1055.666682] env[62522]: value = "task-2416044" [ 1055.666682] env[62522]: _type = "Task" [ 1055.666682] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.687023] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416044, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.808620] env[62522]: DEBUG nova.network.neutron [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Updating instance_info_cache with network_info: [{"id": "954fee91-36f2-497a-a856-6828a519a456", "address": "fa:16:3e:df:f4:48", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.136", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap954fee91-36", "ovs_interfaceid": "954fee91-36f2-497a-a856-6828a519a456", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0ac91806-75b5-459d-8243-019320a7daf0", "address": "fa:16:3e:d2:6b:0e", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ac91806-75", "ovs_interfaceid": "0ac91806-75b5-459d-8243-019320a7daf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fa421858-7ef8-4e24-94ec-cb1477a79f22", "address": "fa:16:3e:2f:7c:ba", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa421858-7e", "ovs_interfaceid": "fa421858-7ef8-4e24-94ec-cb1477a79f22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.812869] env[62522]: DEBUG nova.scheduler.client.report [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1055.861330] env[62522]: DEBUG nova.compute.manager [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1055.861664] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1055.862909] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af6f17b-748f-4d5e-95c8-24befa34b1e6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.874030] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1055.874385] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-504493cc-dd45-4394-8f2a-2961487ff696 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.882506] env[62522]: DEBUG oslo_vmware.api [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1055.882506] env[62522]: value = "task-2416045" [ 1055.882506] env[62522]: _type = "Task" [ 1055.882506] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.892584] env[62522]: DEBUG oslo_vmware.api [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416045, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.975252] env[62522]: DEBUG nova.compute.manager [req-02782023-d4c4-416c-8978-d2d7ef693153 req-078cb295-5864-411b-b6a9-ec4f66a05840 service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Received event network-changed-fa421858-7ef8-4e24-94ec-cb1477a79f22 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1055.975753] env[62522]: DEBUG nova.compute.manager [req-02782023-d4c4-416c-8978-d2d7ef693153 req-078cb295-5864-411b-b6a9-ec4f66a05840 service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Refreshing instance network info cache due to event network-changed-fa421858-7ef8-4e24-94ec-cb1477a79f22. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1055.975988] env[62522]: DEBUG oslo_concurrency.lockutils [req-02782023-d4c4-416c-8978-d2d7ef693153 req-078cb295-5864-411b-b6a9-ec4f66a05840 service nova] Acquiring lock "refresh_cache-fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1056.073022] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "1a5a235a-477f-4da5-b5c1-ee057211cce8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.073332] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "1a5a235a-477f-4da5-b5c1-ee057211cce8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.073554] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "1a5a235a-477f-4da5-b5c1-ee057211cce8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.073739] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "1a5a235a-477f-4da5-b5c1-ee057211cce8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.073970] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "1a5a235a-477f-4da5-b5c1-ee057211cce8-events" 
"released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.077159] env[62522]: INFO nova.compute.manager [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Terminating instance [ 1056.115930] env[62522]: DEBUG oslo_vmware.api [None req-cd07e64a-2654-40a3-b169-83da16ecb6b8 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416041, 'name': PowerOnVM_Task, 'duration_secs': 2.086573} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.117330] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd07e64a-2654-40a3-b169-83da16ecb6b8 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1056.117330] env[62522]: DEBUG nova.compute.manager [None req-cd07e64a-2654-40a3-b169-83da16ecb6b8 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1056.117757] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eae028d-76fb-4be8-8c2d-d73e7928fd77 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.176921] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416044, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.093406} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.178991] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1056.179202] env[62522]: DEBUG oslo_concurrency.lockutils [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8/f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1056.179456] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8/f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8.vmdk to [datastore2] 04a9d357-d094-487b-8f09-2f7e0c35f0d7/04a9d357-d094-487b-8f09-2f7e0c35f0d7.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1056.180024] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-37f25dc8-3611-4435-ab21-613d4bdcd3db {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.188679] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 1056.188679] env[62522]: value = "task-2416046" [ 1056.188679] env[62522]: _type = "Task" [ 1056.188679] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.198334] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416046, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.310018] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "refresh_cache-fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1056.310656] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1056.310805] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.311097] env[62522]: DEBUG oslo_concurrency.lockutils [req-02782023-d4c4-416c-8978-d2d7ef693153 req-078cb295-5864-411b-b6a9-ec4f66a05840 service nova] Acquired lock "refresh_cache-fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.311277] env[62522]: DEBUG nova.network.neutron [req-02782023-d4c4-416c-8978-d2d7ef693153 req-078cb295-5864-411b-b6a9-ec4f66a05840 service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Refreshing network info cache for port fa421858-7ef8-4e24-94ec-cb1477a79f22 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1056.316740] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bcd72e5-7676-4558-80d3-ef327a6bc996 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.321765] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62522) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1056.322271] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.462s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.322271] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.658s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.322420] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 
0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.327213] env[62522]: DEBUG oslo_concurrency.lockutils [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.119s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.327433] env[62522]: DEBUG nova.objects.instance [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Lazy-loading 'resources' on Instance uuid 9141ffdd-cbfa-4efe-a01b-dc1326af474c {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1056.347303] env[62522]: DEBUG nova.virt.hardware [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1056.347570] env[62522]: DEBUG nova.virt.hardware [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1056.347733] env[62522]: DEBUG nova.virt.hardware [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1056.347914] env[62522]: DEBUG nova.virt.hardware [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1056.348081] env[62522]: DEBUG nova.virt.hardware [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1056.348234] env[62522]: DEBUG nova.virt.hardware [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1056.348438] env[62522]: DEBUG nova.virt.hardware [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 
tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1056.348592] env[62522]: DEBUG nova.virt.hardware [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1056.348770] env[62522]: DEBUG nova.virt.hardware [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1056.348937] env[62522]: DEBUG nova.virt.hardware [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1056.349135] env[62522]: DEBUG nova.virt.hardware [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1056.356085] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Reconfiguring VM to attach interface {{(pid=62522) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1056.356346] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d00a320-82f4-4edc-826b-75b45d3004ec {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.370162] env[62522]: INFO nova.scheduler.client.report [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Deleted allocations for instance 02708991-7f71-408e-89d8-932b845553d1 [ 1056.386021] env[62522]: DEBUG oslo_vmware.api [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1056.386021] env[62522]: value = "task-2416047" [ 1056.386021] env[62522]: _type = "Task" [ 1056.386021] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.398393] env[62522]: DEBUG oslo_vmware.api [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416045, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.401963] env[62522]: DEBUG oslo_vmware.api [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416047, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.581677] env[62522]: DEBUG nova.compute.manager [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1056.582842] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1056.582986] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc04ff98-d9a4-4d54-ba17-5dcbcd286e4c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.593766] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1056.594089] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7e094769-4e48-4743-a042-6709261c6174 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.602907] env[62522]: DEBUG oslo_vmware.api [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1056.602907] env[62522]: value = "task-2416048" [ 1056.602907] env[62522]: _type = "Task" [ 1056.602907] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.611593] env[62522]: DEBUG oslo_vmware.api [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416048, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.700150] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416046, 'name': CopyVirtualDisk_Task} progress is 9%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.883618] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fc3250d2-b4d8-4ed7-9d60-363bee15c7a2 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "02708991-7f71-408e-89d8-932b845553d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.814s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.912389] env[62522]: DEBUG oslo_vmware.api [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416045, 'name': PowerOffVM_Task, 'duration_secs': 0.770784} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.916126] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1056.916345] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1056.916986] env[62522]: DEBUG oslo_vmware.api [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416047, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.919781] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bdf1ee8a-94d2-486a-a338-52bb8868ab62 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.012041] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1057.012041] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1057.012041] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Deleting the datastore file [datastore1] 5c9b1120-84ad-48d5-8cd4-0cf387963066 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1057.012041] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-11cbfc65-369a-46e4-9798-2eff46c456d5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.027277] env[62522]: DEBUG oslo_vmware.api [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1057.027277] env[62522]: value = "task-2416050" [ 1057.027277] env[62522]: _type = "Task" [ 1057.027277] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.044312] env[62522]: DEBUG oslo_vmware.api [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416050, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.120292] env[62522]: DEBUG oslo_vmware.api [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416048, 'name': PowerOffVM_Task, 'duration_secs': 0.24809} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.124459] env[62522]: DEBUG nova.network.neutron [req-02782023-d4c4-416c-8978-d2d7ef693153 req-078cb295-5864-411b-b6a9-ec4f66a05840 service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Updated VIF entry in instance network info cache for port fa421858-7ef8-4e24-94ec-cb1477a79f22. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1057.125349] env[62522]: DEBUG nova.network.neutron [req-02782023-d4c4-416c-8978-d2d7ef693153 req-078cb295-5864-411b-b6a9-ec4f66a05840 service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Updating instance_info_cache with network_info: [{"id": "954fee91-36f2-497a-a856-6828a519a456", "address": "fa:16:3e:df:f4:48", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.136", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap954fee91-36", "ovs_interfaceid": "954fee91-36f2-497a-a856-6828a519a456", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0ac91806-75b5-459d-8243-019320a7daf0", "address": "fa:16:3e:d2:6b:0e", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ac91806-75", "ovs_interfaceid": "0ac91806-75b5-459d-8243-019320a7daf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fa421858-7ef8-4e24-94ec-cb1477a79f22", "address": "fa:16:3e:2f:7c:ba", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa421858-7e", "ovs_interfaceid": "fa421858-7ef8-4e24-94ec-cb1477a79f22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.128135] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1057.128490] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1057.129524] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-35688354-0b76-4027-859e-e8429012cfcb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.158739] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d179bcd8-d5b4-40db-a44b-07bf9e4e5b88 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.168170] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cce074b-7bc7-4a5b-bed3-348daec28f18 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.208507] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-168809ec-db2b-48ad-aea4-01ec46be012f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.214434] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1057.214763] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1057.214977] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Deleting the datastore file [datastore1] 1a5a235a-477f-4da5-b5c1-ee057211cce8 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1057.216380] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d01bdbda-4161-455a-830f-439fde76753d {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.224199] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416046, 'name': CopyVirtualDisk_Task} progress is 26%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.226050] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc3dfb1-7df1-4b6e-8e93-2ede96ae5b3f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.232014] env[62522]: DEBUG oslo_vmware.api [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for the task: (returnval){ [ 1057.232014] env[62522]: value = "task-2416052" [ 1057.232014] env[62522]: _type = "Task" [ 1057.232014] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.245115] env[62522]: DEBUG nova.compute.provider_tree [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1057.253372] env[62522]: DEBUG oslo_vmware.api [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416052, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.411079] env[62522]: DEBUG oslo_vmware.api [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416047, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.545089] env[62522]: DEBUG oslo_vmware.api [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416050, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.287355} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.545421] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1057.545645] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1057.545913] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1057.546357] env[62522]: INFO nova.compute.manager [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1057.546828] env[62522]: DEBUG oslo.service.loopingcall [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1057.547185] env[62522]: DEBUG nova.compute.manager [-] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1057.547366] env[62522]: DEBUG nova.network.neutron [-] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1057.629759] env[62522]: DEBUG oslo_concurrency.lockutils [req-02782023-d4c4-416c-8978-d2d7ef693153 req-078cb295-5864-411b-b6a9-ec4f66a05840 service nova] Releasing lock "refresh_cache-fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1057.648161] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "522e778b-6e01-4554-a3eb-dd1efa7870de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.648530] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "522e778b-6e01-4554-a3eb-dd1efa7870de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} 
[ 1057.716261] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416046, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.746060] env[62522]: DEBUG oslo_vmware.api [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Task: {'id': task-2416052, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.26524} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.752601] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1057.753091] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1057.753190] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1057.753824] env[62522]: INFO nova.compute.manager [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1057.753824] env[62522]: DEBUG oslo.service.loopingcall [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1057.754476] env[62522]: DEBUG nova.compute.manager [-] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1057.754476] env[62522]: DEBUG nova.network.neutron [-] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1057.776988] env[62522]: ERROR nova.scheduler.client.report [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] [req-e367b264-7e3a-4d9c-82d0-d6302b3865c6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c7fa38b2-245d-4337-a012-22c1a01c0a72. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e367b264-7e3a-4d9c-82d0-d6302b3865c6"}]} [ 1057.796981] env[62522]: DEBUG nova.scheduler.client.report [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Refreshing inventories for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1057.816791] env[62522]: DEBUG nova.scheduler.client.report [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Updating ProviderTree inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1057.817112] env[62522]: DEBUG nova.compute.provider_tree [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1057.835933] env[62522]: DEBUG 
nova.scheduler.client.report [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Refreshing aggregate associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, aggregates: None {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1057.875022] env[62522]: DEBUG nova.scheduler.client.report [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Refreshing trait associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1057.913849] env[62522]: DEBUG oslo_vmware.api [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416047, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.040259] env[62522]: DEBUG oslo_concurrency.lockutils [None req-22dd138e-3c7e-4866-97bc-846cd9a311ff tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "cabe40a0-8bd0-4d77-b949-298bd194fa42" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1058.040546] env[62522]: DEBUG oslo_concurrency.lockutils [None req-22dd138e-3c7e-4866-97bc-846cd9a311ff tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "cabe40a0-8bd0-4d77-b949-298bd194fa42" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1058.040731] env[62522]: DEBUG nova.compute.manager [None req-22dd138e-3c7e-4866-97bc-846cd9a311ff tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Going to confirm migration 4 {{(pid=62522) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1058.151585] env[62522]: DEBUG nova.compute.manager [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1058.200203] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6ed1a1-ccde-4ecf-b18b-8e0ce212e179 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.212021] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e9557a-7ed3-44c6-879e-a3a3bdaea345 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.219938] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416046, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.255634] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4365c66-dc75-4713-8a75-2b4e3e95889a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.265299] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-443d97c8-0863-482f-b346-e68c1a30f1e5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.281780] env[62522]: DEBUG nova.compute.provider_tree [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1058.360893] env[62522]: DEBUG nova.network.neutron [-] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.413602] env[62522]: DEBUG oslo_vmware.api [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416047, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.522845] env[62522]: DEBUG nova.compute.manager [req-1de0dfb4-e456-48c9-a155-d98e11ee16d0 req-462970b1-896f-4b71-b64d-e7a8b9c90c70 service nova] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Received event network-vif-deleted-78283962-9062-464a-b1f4-a2319257559b {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1058.544362] env[62522]: DEBUG nova.compute.manager [req-3afc2a61-9875-406a-bfd9-fad06818c32e req-aac5a8a9-40fc-4444-9187-20dcc1600901 service nova] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Received event network-vif-deleted-f38ae927-c0d7-4f7c-91ab-2354af588af0 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1058.544362] env[62522]: INFO nova.compute.manager [req-3afc2a61-9875-406a-bfd9-fad06818c32e req-aac5a8a9-40fc-4444-9187-20dcc1600901 service nova] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Neutron deleted interface f38ae927-c0d7-4f7c-91ab-2354af588af0; detaching it from the instance and deleting it from the info cache [ 1058.544510] env[62522]: DEBUG nova.network.neutron [req-3afc2a61-9875-406a-bfd9-fad06818c32e req-aac5a8a9-40fc-4444-9187-20dcc1600901 service nova] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.585432] env[62522]: DEBUG nova.network.neutron [-] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.603907] env[62522]: DEBUG oslo_concurrency.lockutils [None req-22dd138e-3c7e-4866-97bc-846cd9a311ff tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "refresh_cache-cabe40a0-8bd0-4d77-b949-298bd194fa42" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1058.604236] env[62522]: DEBUG oslo_concurrency.lockutils [None req-22dd138e-3c7e-4866-97bc-846cd9a311ff tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired lock "refresh_cache-cabe40a0-8bd0-4d77-b949-298bd194fa42" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.604477] env[62522]: DEBUG nova.network.neutron [None req-22dd138e-3c7e-4866-97bc-846cd9a311ff tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1058.604835] env[62522]: DEBUG nova.objects.instance [None req-22dd138e-3c7e-4866-97bc-846cd9a311ff tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lazy-loading 'info_cache' on Instance uuid cabe40a0-8bd0-4d77-b949-298bd194fa42 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1058.684324] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1058.717265] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416046, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.819282] env[62522]: DEBUG nova.scheduler.client.report [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Updated inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with generation 125 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1058.819591] env[62522]: DEBUG nova.compute.provider_tree [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Updating resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 generation from 125 to 126 during operation: update_inventory {{(pid=62522) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1058.819797] env[62522]: DEBUG nova.compute.provider_tree [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1058.863974] env[62522]: INFO nova.compute.manager [-] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Took 1.32 seconds to deallocate network for instance. [ 1058.911291] env[62522]: DEBUG oslo_vmware.api [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416047, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.000047] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.000047] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.000552] env[62522]: INFO nova.compute.manager [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Shelving [ 1059.051024] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ca45b185-2441-42be-84d3-7deecc4e415b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.062860] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbbc1155-898a-4660-88d6-fc88b49525b8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.087771] env[62522]: INFO nova.compute.manager [-] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Took 1.33 seconds to deallocate network for instance. [ 1059.103625] env[62522]: DEBUG nova.compute.manager [req-3afc2a61-9875-406a-bfd9-fad06818c32e req-aac5a8a9-40fc-4444-9187-20dcc1600901 service nova] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Detach interface failed, port_id=f38ae927-c0d7-4f7c-91ab-2354af588af0, reason: Instance 1a5a235a-477f-4da5-b5c1-ee057211cce8 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1059.216297] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416046, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.893269} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.216652] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8/f9ed5f31-86db-46dd-b6c8-bd2cbb2f7fe8.vmdk to [datastore2] 04a9d357-d094-487b-8f09-2f7e0c35f0d7/04a9d357-d094-487b-8f09-2f7e0c35f0d7.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1059.217566] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d65de8e-ad2d-413a-8264-314fa814f1b2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.243580] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] 04a9d357-d094-487b-8f09-2f7e0c35f0d7/04a9d357-d094-487b-8f09-2f7e0c35f0d7.vmdk or device None with type streamOptimized {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1059.244068] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-287fee9e-946b-4ce5-82cd-31d32c0add9a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.268197] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 1059.268197] env[62522]: value = "task-2416053" [ 1059.268197] env[62522]: _type = "Task" [ 1059.268197] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.278253] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416053, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.325492] env[62522]: DEBUG oslo_concurrency.lockutils [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.998s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.328327] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.644s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.330181] env[62522]: INFO nova.compute.claims [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1059.354026] env[62522]: INFO nova.scheduler.client.report [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Deleted allocations for instance 9141ffdd-cbfa-4efe-a01b-dc1326af474c [ 1059.371528] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.411692] env[62522]: DEBUG oslo_vmware.api [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416047, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.609434] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.780346] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416053, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.861166] env[62522]: DEBUG oslo_concurrency.lockutils [None req-86877cfd-bf6b-45e1-9d67-9ecff2b1e91e tempest-ServersWithSpecificFlavorTestJSON-1625145482 tempest-ServersWithSpecificFlavorTestJSON-1625145482-project-member] Lock "9141ffdd-cbfa-4efe-a01b-dc1326af474c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.775s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.863863] env[62522]: DEBUG nova.network.neutron [None req-22dd138e-3c7e-4866-97bc-846cd9a311ff tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Updating instance_info_cache with network_info: [{"id": "9e10cc19-76da-49d9-80b6-068ce128a1b0", "address": "fa:16:3e:3f:35:de", "network": {"id": "70e81afa-0eda-49c5-b072-e79b3c287468", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1715169983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff5da278d2be4ca983424c8291beadec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e10cc19-76", "ovs_interfaceid": "9e10cc19-76da-49d9-80b6-068ce128a1b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.910640] env[62522]: DEBUG oslo_vmware.api [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416047, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.010275] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1060.010613] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c00bc984-9a79-405a-a7d6-8cbdad7f29b0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.021066] env[62522]: DEBUG oslo_vmware.api [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1060.021066] env[62522]: value = "task-2416054" [ 1060.021066] env[62522]: _type = "Task" [ 1060.021066] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.029176] env[62522]: DEBUG oslo_vmware.api [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416054, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.280344] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416053, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.366478] env[62522]: DEBUG oslo_concurrency.lockutils [None req-22dd138e-3c7e-4866-97bc-846cd9a311ff tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Releasing lock "refresh_cache-cabe40a0-8bd0-4d77-b949-298bd194fa42" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1060.366781] env[62522]: DEBUG nova.objects.instance [None req-22dd138e-3c7e-4866-97bc-846cd9a311ff tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lazy-loading 'migration_context' on Instance uuid cabe40a0-8bd0-4d77-b949-298bd194fa42 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1060.421337] env[62522]: DEBUG oslo_vmware.api [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416047, 'name': ReconfigVM_Task, 'duration_secs': 3.805495} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.422045] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1060.422376] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Reconfigured VM to attach interface {{(pid=62522) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1060.534274] env[62522]: DEBUG oslo_vmware.api [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416054, 'name': PowerOffVM_Task, 'duration_secs': 0.495381} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.536965] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1060.538958] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b400ff-fe2d-4701-9ffc-3ac0a3aacd6f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.562756] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bf473b7-4101-4fe9-aa49-a002654be3f2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.604464] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9fa32e6-2ea7-4c18-b33f-805fec83619c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.614413] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6445396a-cc8b-4f09-90fa-d1a87e8ef708 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.645952] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f3e76c-8700-4a7a-bce3-8c71950c20c2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.654801] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e21282-6c8d-4fc7-b57d-065612b7b050 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.669997] env[62522]: DEBUG nova.compute.provider_tree [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1060.783059] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416053, 'name': ReconfigVM_Task, 'duration_secs': 1.117222} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.783059] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Reconfigured VM instance instance-00000040 to attach disk [datastore2] 04a9d357-d094-487b-8f09-2f7e0c35f0d7/04a9d357-d094-487b-8f09-2f7e0c35f0d7.vmdk or device None with type streamOptimized {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1060.783719] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-54d89237-42cb-422d-b575-f333179adc8e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.791393] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 1060.791393] env[62522]: value = "task-2416055" [ 1060.791393] env[62522]: _type = "Task" [ 1060.791393] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.801028] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416055, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.869635] env[62522]: DEBUG nova.objects.base [None req-22dd138e-3c7e-4866-97bc-846cd9a311ff tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62522) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1060.870675] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b226d9b5-0e81-4980-a765-103d653d1573 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.890086] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82060870-5d64-413d-a892-ce3c34f6d00d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.896274] env[62522]: DEBUG oslo_vmware.api [None req-22dd138e-3c7e-4866-97bc-846cd9a311ff tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1060.896274] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a423f1-b960-10b2-0d6b-5a807d502ff9" [ 1060.896274] env[62522]: _type = "Task" [ 1060.896274] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.905940] env[62522]: DEBUG oslo_vmware.api [None req-22dd138e-3c7e-4866-97bc-846cd9a311ff tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a423f1-b960-10b2-0d6b-5a807d502ff9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.927963] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3f8c1ef8-3ced-4ef9-bf14-7af5c921cbb4 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "interface-fcd0eef6-d059-4495-a982-058b6c9626d1-fa421858-7ef8-4e24-94ec-cb1477a79f22" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 10.108s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1061.074836] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Creating Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1061.075193] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-176cf00b-42b8-4a24-b837-08e1661b7d1a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.084766] env[62522]: DEBUG oslo_vmware.api [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1061.084766] env[62522]: value = "task-2416056" [ 1061.084766] env[62522]: _type = "Task" [ 1061.084766] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.094028] env[62522]: DEBUG oslo_vmware.api [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416056, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.173728] env[62522]: DEBUG nova.scheduler.client.report [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1061.303245] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416055, 'name': Rename_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.408321] env[62522]: DEBUG oslo_vmware.api [None req-22dd138e-3c7e-4866-97bc-846cd9a311ff tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a423f1-b960-10b2-0d6b-5a807d502ff9, 'name': SearchDatastore_Task, 'duration_secs': 0.008961} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.408620] env[62522]: DEBUG oslo_concurrency.lockutils [None req-22dd138e-3c7e-4866-97bc-846cd9a311ff tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.595150] env[62522]: DEBUG oslo_vmware.api [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416056, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.678792] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.350s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1061.680078] env[62522]: DEBUG nova.compute.manager [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1061.682492] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.311s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1061.683203] env[62522]: DEBUG nova.objects.instance [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Lazy-loading 'resources' on Instance uuid 5c9b1120-84ad-48d5-8cd4-0cf387963066 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1061.805159] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416055, 'name': Rename_Task, 'duration_secs': 0.81041} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.805159] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1061.805159] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e99c98c7-4ec2-40e7-93f9-044fc34e6ad9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.814685] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 1061.814685] env[62522]: value = "task-2416057" [ 1061.814685] env[62522]: _type = "Task" [ 1061.814685] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.828485] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416057, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.096756] env[62522]: DEBUG oslo_vmware.api [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416056, 'name': CreateSnapshot_Task, 'duration_secs': 0.872635} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.097091] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Created Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1062.098254] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3f1bdb8-3800-46d8-89fa-84f5f6067490 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.189035] env[62522]: DEBUG nova.compute.utils [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1062.191844] env[62522]: DEBUG nova.compute.manager [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1062.192706] env[62522]: DEBUG nova.network.neutron [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1062.290587] env[62522]: DEBUG nova.policy [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9694ee575d094ccf845eb57acf3e70c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '00b27498c07344d1bf9cecefa0fca033', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1062.327216] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416057, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.478119] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d598684b-c033-4842-b73e-77078633b90d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.488420] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ebb934f-5c49-45b6-a5d9-8eb8ea37c9cd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.524295] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ae354ef-f69e-4563-9b0c-e09089ec8e8a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.534665] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a94b1126-98b3-40fb-b067-7f3d3b100cb0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.556024] env[62522]: DEBUG nova.compute.provider_tree [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1062.622118] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Creating linked-clone VM from snapshot {{(pid=62522) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1062.622118] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4799d5c9-b669-4928-9276-21ecdb97c7f5 {{(pid=62522) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.634761] env[62522]: DEBUG oslo_vmware.api [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1062.634761] env[62522]: value = "task-2416058" [ 1062.634761] env[62522]: _type = "Task" [ 1062.634761] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.644959] env[62522]: DEBUG oslo_vmware.api [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416058, 'name': CloneVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.693964] env[62522]: DEBUG nova.compute.manager [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1062.831307] env[62522]: DEBUG oslo_vmware.api [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416057, 'name': PowerOnVM_Task, 'duration_secs': 0.929696} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.831663] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1062.893418] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "interface-fcd0eef6-d059-4495-a982-058b6c9626d1-0ac91806-75b5-459d-8243-019320a7daf0" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1062.893418] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "interface-fcd0eef6-d059-4495-a982-058b6c9626d1-0ac91806-75b5-459d-8243-019320a7daf0" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.963510] env[62522]: DEBUG nova.compute.manager [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1062.965717] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9eb313ab-2033-48a7-b8a5-42d750ced5e5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.059900] env[62522]: DEBUG nova.scheduler.client.report [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1063.134310] env[62522]: DEBUG nova.network.neutron [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Successfully created port: b258477a-b39a-4d17-975a-087d4d6d41bd {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1063.153850] env[62522]: DEBUG oslo_vmware.api [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416058, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.366341] env[62522]: DEBUG oslo_concurrency.lockutils [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Acquiring lock "8539afc0-1753-4c37-9fc9-25ec97b97243" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.366616] env[62522]: DEBUG oslo_concurrency.lockutils [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Lock "8539afc0-1753-4c37-9fc9-25ec97b97243" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.366838] env[62522]: DEBUG oslo_concurrency.lockutils [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Acquiring lock "8539afc0-1753-4c37-9fc9-25ec97b97243-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.367030] env[62522]: DEBUG oslo_concurrency.lockutils [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Lock "8539afc0-1753-4c37-9fc9-25ec97b97243-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.367207] env[62522]: DEBUG oslo_concurrency.lockutils 
[None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Lock "8539afc0-1753-4c37-9fc9-25ec97b97243-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.369749] env[62522]: INFO nova.compute.manager [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Terminating instance [ 1063.396179] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1063.396370] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.397329] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f362c1-455a-430a-9946-9d38ef0c16ae {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.423846] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a406320b-7a61-4390-a973-7db49d992504 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.455744] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Reconfiguring VM to detach interface {{(pid=62522) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1063.455744] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a4cc6dc-cd84-4106-89bc-fe511271dde9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.478154] env[62522]: DEBUG oslo_vmware.api [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1063.478154] env[62522]: value = "task-2416059" [ 1063.478154] env[62522]: _type = "Task" [ 1063.478154] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.493916] env[62522]: DEBUG oslo_vmware.api [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416059, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.494562] env[62522]: DEBUG oslo_concurrency.lockutils [None req-94fadd48-eeef-4945-b907-54b2b30c5ec8 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "04a9d357-d094-487b-8f09-2f7e0c35f0d7" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 30.352s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.567670] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.885s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.571036] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.961s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.571036] env[62522]: DEBUG nova.objects.instance [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lazy-loading 'resources' on Instance uuid 1a5a235a-477f-4da5-b5c1-ee057211cce8 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1063.596470] env[62522]: INFO nova.scheduler.client.report [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Deleted allocations for instance 5c9b1120-84ad-48d5-8cd4-0cf387963066 [ 1063.648854] env[62522]: DEBUG oslo_vmware.api [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416058, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.703891] env[62522]: DEBUG nova.compute.manager [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1063.731885] env[62522]: DEBUG nova.virt.hardware [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1063.732198] env[62522]: DEBUG nova.virt.hardware [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1063.732362] env[62522]: DEBUG nova.virt.hardware [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1063.732543] env[62522]: DEBUG nova.virt.hardware [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1063.733191] env[62522]: DEBUG nova.virt.hardware [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1063.733191] env[62522]: DEBUG nova.virt.hardware [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1063.733191] env[62522]: DEBUG nova.virt.hardware [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1063.733402] env[62522]: DEBUG nova.virt.hardware [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1063.733402] env[62522]: DEBUG nova.virt.hardware [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 
tempest-ServersTestJSON-990685860-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1063.733539] env[62522]: DEBUG nova.virt.hardware [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1063.733712] env[62522]: DEBUG nova.virt.hardware [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1063.734909] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4417515-daca-4ddc-82a4-c892ea89209f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.744471] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a09cb8-9b14-46fc-a8e1-dbb93dd87ff4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.874045] env[62522]: DEBUG nova.compute.manager [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1063.874323] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1063.875288] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf97ed4-b7c4-4e32-b69c-dd470f4c72e5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.883834] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1063.884120] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-179179f6-3116-45da-9929-16ea7f530c88 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.892659] env[62522]: DEBUG oslo_vmware.api [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Waiting for the task: (returnval){ [ 1063.892659] env[62522]: value = "task-2416060" [ 1063.892659] env[62522]: _type = "Task" [ 1063.892659] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.903154] env[62522]: DEBUG oslo_vmware.api [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Task: {'id': task-2416060, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.995869] env[62522]: DEBUG oslo_vmware.api [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416059, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.108353] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8d69ed9-d70d-4a52-ac4f-a93d3fdba54b tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Lock "5c9b1120-84ad-48d5-8cd4-0cf387963066" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.758s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.158178] env[62522]: DEBUG oslo_vmware.api [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416058, 'name': CloneVM_Task} progress is 95%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.402522] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e9ee5a2-16a7-47d5-a350-c333d4e529a2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.408315] env[62522]: DEBUG oslo_vmware.api [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Task: {'id': task-2416060, 'name': PowerOffVM_Task, 'duration_secs': 0.407418} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.409036] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1064.409230] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1064.409721] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-09604214-c613-422c-acea-94ee0e5b288e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.414745] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fade1da-1ccc-40c3-b729-1d7687ebdcea {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.451211] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c71bd576-6c4e-4abe-b8be-bc80787b699e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.463743] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca958513-a1e8-4c37-9e37-2c2298dc8bbc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.480977] env[62522]: DEBUG nova.compute.provider_tree [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1064.499752] env[62522]: DEBUG oslo_vmware.api [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416059, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.500380] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1064.500593] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1064.500769] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Deleting the datastore file [datastore1] 8539afc0-1753-4c37-9fc9-25ec97b97243 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1064.501055] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b6b8124e-ae2d-404c-b6a6-4b0ff5e9cf58 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.509789] env[62522]: DEBUG oslo_vmware.api [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Waiting for the task: (returnval){ [ 1064.509789] env[62522]: value = "task-2416062" [ 1064.509789] env[62522]: _type = "Task" [ 1064.509789] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.523529] env[62522]: DEBUG oslo_vmware.api [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Task: {'id': task-2416062, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.649963] env[62522]: DEBUG oslo_vmware.api [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416058, 'name': CloneVM_Task, 'duration_secs': 1.790365} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.650298] env[62522]: INFO nova.virt.vmwareapi.vmops [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Created linked-clone VM from snapshot [ 1064.651194] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d28b109-f178-41c0-a620-f58c2968bacb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.660829] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Uploading image 61bbb676-eb4a-448b-9d8a-abdf1c9af6ab {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1064.687789] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1064.687789] env[62522]: value = "vm-489806" [ 1064.687789] env[62522]: _type = "VirtualMachine" [ 1064.687789] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1064.688097] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-c6f9ba2f-ecd0-4c83-8d68-3d8ca3ba18a1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.697180] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lease: (returnval){ [ 1064.697180] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c407a8-6a0c-2c80-cb46-5237a7530c98" [ 1064.697180] env[62522]: _type = "HttpNfcLease" [ 1064.697180] env[62522]: } obtained for exporting VM: (result){ [ 1064.697180] env[62522]: value = "vm-489806" [ 1064.697180] env[62522]: _type = "VirtualMachine" [ 1064.697180] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1064.697557] env[62522]: DEBUG oslo_vmware.api [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the lease: (returnval){ [ 1064.697557] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c407a8-6a0c-2c80-cb46-5237a7530c98" [ 1064.697557] env[62522]: _type = "HttpNfcLease" [ 1064.697557] env[62522]: } to be ready. {{(pid=62522) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1064.706210] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1064.706210] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c407a8-6a0c-2c80-cb46-5237a7530c98" [ 1064.706210] env[62522]: _type = "HttpNfcLease" [ 1064.706210] env[62522]: } is initializing. 
{{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1064.935758] env[62522]: DEBUG nova.compute.manager [req-3611a1c5-2bec-414b-acec-bd67742b2b32 req-9695bf4e-7559-44bc-b764-0fd46d39b8d8 service nova] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Received event network-vif-plugged-b258477a-b39a-4d17-975a-087d4d6d41bd {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1064.936115] env[62522]: DEBUG oslo_concurrency.lockutils [req-3611a1c5-2bec-414b-acec-bd67742b2b32 req-9695bf4e-7559-44bc-b764-0fd46d39b8d8 service nova] Acquiring lock "522e778b-6e01-4554-a3eb-dd1efa7870de-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1064.936431] env[62522]: DEBUG oslo_concurrency.lockutils [req-3611a1c5-2bec-414b-acec-bd67742b2b32 req-9695bf4e-7559-44bc-b764-0fd46d39b8d8 service nova] Lock "522e778b-6e01-4554-a3eb-dd1efa7870de-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.936686] env[62522]: DEBUG oslo_concurrency.lockutils [req-3611a1c5-2bec-414b-acec-bd67742b2b32 req-9695bf4e-7559-44bc-b764-0fd46d39b8d8 service nova] Lock "522e778b-6e01-4554-a3eb-dd1efa7870de-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.936934] env[62522]: DEBUG nova.compute.manager [req-3611a1c5-2bec-414b-acec-bd67742b2b32 req-9695bf4e-7559-44bc-b764-0fd46d39b8d8 service nova] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] No waiting events found dispatching network-vif-plugged-b258477a-b39a-4d17-975a-087d4d6d41bd {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1064.937216] env[62522]: WARNING nova.compute.manager [req-3611a1c5-2bec-414b-acec-bd67742b2b32 req-9695bf4e-7559-44bc-b764-0fd46d39b8d8 service nova] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Received unexpected event network-vif-plugged-b258477a-b39a-4d17-975a-087d4d6d41bd for instance with vm_state building and task_state spawning. 
[ 1064.957885] env[62522]: DEBUG oslo_concurrency.lockutils [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquiring lock "548364e9-b19a-4777-8e62-19b8a0594f36" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1064.957885] env[62522]: DEBUG oslo_concurrency.lockutils [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Lock "548364e9-b19a-4777-8e62-19b8a0594f36" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.958119] env[62522]: DEBUG oslo_concurrency.lockutils [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquiring lock "548364e9-b19a-4777-8e62-19b8a0594f36-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1064.958213] env[62522]: DEBUG oslo_concurrency.lockutils [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Lock "548364e9-b19a-4777-8e62-19b8a0594f36-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.958296] env[62522]: DEBUG oslo_concurrency.lockutils [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Lock "548364e9-b19a-4777-8e62-19b8a0594f36-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.960576] env[62522]: INFO nova.compute.manager [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Terminating instance [ 1064.992182] env[62522]: DEBUG nova.scheduler.client.report [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1064.995641] env[62522]: DEBUG oslo_vmware.api [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416059, 'name': ReconfigVM_Task} progress is 
14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.021195] env[62522]: DEBUG oslo_vmware.api [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Task: {'id': task-2416062, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167055} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.021455] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1065.021639] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1065.021852] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1065.022042] env[62522]: INFO nova.compute.manager [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1065.022291] env[62522]: DEBUG oslo.service.loopingcall [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1065.022479] env[62522]: DEBUG nova.compute.manager [-] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1065.022574] env[62522]: DEBUG nova.network.neutron [-] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1065.091915] env[62522]: DEBUG nova.network.neutron [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Successfully updated port: b258477a-b39a-4d17-975a-087d4d6d41bd {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1065.207808] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1065.207808] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c407a8-6a0c-2c80-cb46-5237a7530c98" [ 1065.207808] env[62522]: _type = "HttpNfcLease" [ 1065.207808] env[62522]: } is ready. 
{{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1065.208182] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1065.208182] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c407a8-6a0c-2c80-cb46-5237a7530c98" [ 1065.208182] env[62522]: _type = "HttpNfcLease" [ 1065.208182] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1065.208921] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af6cdc4-dd96-4345-a8c6-f8f9e732104b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.218225] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529928a2-3869-7c6c-048a-d03aeeebcc4c/disk-0.vmdk from lease info. {{(pid=62522) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1065.218908] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529928a2-3869-7c6c-048a-d03aeeebcc4c/disk-0.vmdk for reading. {{(pid=62522) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1065.320377] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ef10396e-2c86-4ec2-983f-e8fe9899a259 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.465850] env[62522]: DEBUG nova.compute.manager [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1065.466150] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1065.467171] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a3c20f-52d0-4dd3-819c-cd3d7bd7db87 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.481975] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1065.487318] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-216ba884-6994-4f5b-993b-1e5d2e967889 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.497383] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.927s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1065.499759] env[62522]: DEBUG oslo_vmware.api [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416059, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.502449] env[62522]: DEBUG oslo_concurrency.lockutils [None req-22dd138e-3c7e-4866-97bc-846cd9a311ff tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 4.093s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1065.503635] env[62522]: DEBUG oslo_vmware.api [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1065.503635] env[62522]: value = "task-2416064" [ 1065.503635] env[62522]: _type = "Task" [ 1065.503635] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.514623] env[62522]: DEBUG oslo_vmware.api [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416064, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.546018] env[62522]: INFO nova.scheduler.client.report [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Deleted allocations for instance 1a5a235a-477f-4da5-b5c1-ee057211cce8 [ 1065.596825] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "refresh_cache-522e778b-6e01-4554-a3eb-dd1efa7870de" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1065.596992] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired lock "refresh_cache-522e778b-6e01-4554-a3eb-dd1efa7870de" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1065.597199] env[62522]: DEBUG nova.network.neutron [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1065.608286] env[62522]: DEBUG nova.compute.manager [req-871e0d15-0506-4ab7-9b99-5b8f30daa1d3 req-038d469d-d869-4bc0-b9be-fa0abe0e7f73 service nova] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Received event network-vif-deleted-7fd5b82e-a20b-4752-9751-44487429dc0c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1065.608395] env[62522]: INFO nova.compute.manager [req-871e0d15-0506-4ab7-9b99-5b8f30daa1d3 req-038d469d-d869-4bc0-b9be-fa0abe0e7f73 service nova] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Neutron deleted interface 7fd5b82e-a20b-4752-9751-44487429dc0c; detaching it from the instance and deleting it from the info cache [ 1065.608487] env[62522]: DEBUG nova.network.neutron [req-871e0d15-0506-4ab7-9b99-5b8f30daa1d3 req-038d469d-d869-4bc0-b9be-fa0abe0e7f73 service nova] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.797905] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-438c3089-4d84-4b05-9ee0-061517ed2e0d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.806773] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-50c56b96-313c-4507-8954-bf45bcac2b04 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Suspending the VM {{(pid=62522) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1065.807394] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-9ad208bf-e628-432c-851f-5ae6c1597016 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.817427] env[62522]: DEBUG oslo_vmware.api [None req-50c56b96-313c-4507-8954-bf45bcac2b04 tempest-ServersNegativeTestJSON-922500168 
tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 1065.817427] env[62522]: value = "task-2416065" [ 1065.817427] env[62522]: _type = "Task" [ 1065.817427] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.828853] env[62522]: DEBUG oslo_vmware.api [None req-50c56b96-313c-4507-8954-bf45bcac2b04 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416065, 'name': SuspendVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.994023] env[62522]: DEBUG oslo_vmware.api [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416059, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.023118] env[62522]: DEBUG oslo_vmware.api [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416064, 'name': PowerOffVM_Task, 'duration_secs': 0.304535} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.023545] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1066.023743] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1066.024068] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c9bc8b55-2a56-4ee0-9840-f99afd120070 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.058620] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b48259f1-852a-4a9d-93ac-8500cc8526bf tempest-ImagesTestJSON-182949557 tempest-ImagesTestJSON-182949557-project-member] Lock "1a5a235a-477f-4da5-b5c1-ee057211cce8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.984s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1066.084247] env[62522]: DEBUG nova.network.neutron [-] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1066.111970] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c097bff-02be-49af-89f3-8637612283e8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.122055] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 
tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1066.122055] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1066.122233] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Deleting the datastore file [datastore2] 548364e9-b19a-4777-8e62-19b8a0594f36 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1066.124170] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef9d7418-31f4-42f1-bafc-deec1af178b6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.132911] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3d34040-c8a1-4cd4-b816-7f7c345f4ecb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.146338] env[62522]: DEBUG nova.network.neutron [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1066.153082] env[62522]: DEBUG oslo_vmware.api [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for the task: (returnval){ [ 1066.153082] env[62522]: value = "task-2416067" [ 1066.153082] env[62522]: _type = "Task" [ 1066.153082] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.165549] env[62522]: DEBUG oslo_vmware.api [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416067, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.179379] env[62522]: DEBUG nova.compute.manager [req-871e0d15-0506-4ab7-9b99-5b8f30daa1d3 req-038d469d-d869-4bc0-b9be-fa0abe0e7f73 service nova] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Detach interface failed, port_id=7fd5b82e-a20b-4752-9751-44487429dc0c, reason: Instance 8539afc0-1753-4c37-9fc9-25ec97b97243 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1066.331485] env[62522]: DEBUG oslo_vmware.api [None req-50c56b96-313c-4507-8954-bf45bcac2b04 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416065, 'name': SuspendVM_Task} progress is 66%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.335672] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae5e0b8-5328-4123-820d-4b697d1a1871 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.339616] env[62522]: DEBUG nova.network.neutron [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Updating instance_info_cache with network_info: [{"id": "b258477a-b39a-4d17-975a-087d4d6d41bd", "address": "fa:16:3e:eb:3a:30", "network": {"id": "2037da36-cd13-4cb1-95f4-08d1e174336c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1895994075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00b27498c07344d1bf9cecefa0fca033", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb258477a-b3", "ovs_interfaceid": "b258477a-b39a-4d17-975a-087d4d6d41bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1066.347063] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91935d9e-a9f1-4fed-af7b-600a79bdeab4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.382801] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7625cbec-47a0-4dfd-b373-ab8a42d02f36 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.392099] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82054a3d-14c7-455a-aa7c-a344ce37197d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.412378] env[62522]: DEBUG nova.compute.provider_tree [None req-22dd138e-3c7e-4866-97bc-846cd9a311ff tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1066.494788] env[62522]: DEBUG oslo_vmware.api [None 
req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416059, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.587526] env[62522]: INFO nova.compute.manager [-] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Took 1.56 seconds to deallocate network for instance. [ 1066.669177] env[62522]: DEBUG oslo_vmware.api [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Task: {'id': task-2416067, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.366786} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.669520] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1066.669752] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1066.669988] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1066.670239] env[62522]: INFO nova.compute.manager [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1066.670663] env[62522]: DEBUG oslo.service.loopingcall [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1066.670917] env[62522]: DEBUG nova.compute.manager [-] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1066.671142] env[62522]: DEBUG nova.network.neutron [-] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1066.774740] env[62522]: DEBUG oslo_vmware.rw_handles [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525eeb25-8cc9-e1d1-ddf4-a1b9d04c64d6/disk-0.vmdk. 
{{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1066.778134] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6faa2c8e-834d-4866-ac72-d3de73ac4c9e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.783637] env[62522]: DEBUG oslo_vmware.rw_handles [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525eeb25-8cc9-e1d1-ddf4-a1b9d04c64d6/disk-0.vmdk is in state: ready. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1066.784083] env[62522]: ERROR oslo_vmware.rw_handles [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525eeb25-8cc9-e1d1-ddf4-a1b9d04c64d6/disk-0.vmdk due to incomplete transfer. [ 1066.785223] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-634c2cad-e9e4-4fbb-8d89-76e72843bde7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.797123] env[62522]: DEBUG oslo_vmware.rw_handles [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525eeb25-8cc9-e1d1-ddf4-a1b9d04c64d6/disk-0.vmdk. {{(pid=62522) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1066.797894] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Uploaded image 0f0df71d-7d6e-452b-9dfd-236a14f4f7a2 to the Glance image server {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1066.802021] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Destroying the VM {{(pid=62522) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1066.802021] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-6486abd9-7be3-4032-b6d6-0e0ba2c9f4f1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.809389] env[62522]: DEBUG oslo_vmware.api [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1066.809389] env[62522]: value = "task-2416068" [ 1066.809389] env[62522]: _type = "Task" [ 1066.809389] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.820275] env[62522]: DEBUG oslo_vmware.api [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416068, 'name': Destroy_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.830895] env[62522]: DEBUG oslo_vmware.api [None req-50c56b96-313c-4507-8954-bf45bcac2b04 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416065, 'name': SuspendVM_Task, 'duration_secs': 0.807722} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.831325] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-50c56b96-313c-4507-8954-bf45bcac2b04 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Suspended the VM {{(pid=62522) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1066.831589] env[62522]: DEBUG nova.compute.manager [None req-50c56b96-313c-4507-8954-bf45bcac2b04 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1066.833274] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71d87670-e970-4309-b81e-cf7d0e3980c4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.844540] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Releasing lock "refresh_cache-522e778b-6e01-4554-a3eb-dd1efa7870de" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1066.844919] env[62522]: DEBUG nova.compute.manager [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Instance network_info: |[{"id": "b258477a-b39a-4d17-975a-087d4d6d41bd", "address": "fa:16:3e:eb:3a:30", "network": {"id": "2037da36-cd13-4cb1-95f4-08d1e174336c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1895994075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00b27498c07344d1bf9cecefa0fca033", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb258477a-b3", "ovs_interfaceid": "b258477a-b39a-4d17-975a-087d4d6d41bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1066.845560] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:3a:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f65996a3-f865-4492-9377-cd14ec8b3aae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b258477a-b39a-4d17-975a-087d4d6d41bd', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1066.855259] env[62522]: DEBUG oslo.service.loopingcall [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1066.855259] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1066.855259] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aef9381a-61d7-416d-a17c-8b5aebb44ee2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.880946] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1066.880946] env[62522]: value = "task-2416069" [ 1066.880946] env[62522]: _type = "Task" [ 1066.880946] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.890884] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416069, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.957438] env[62522]: DEBUG nova.scheduler.client.report [None req-22dd138e-3c7e-4866-97bc-846cd9a311ff tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Updated inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with generation 126 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1066.957905] env[62522]: DEBUG nova.compute.provider_tree [None req-22dd138e-3c7e-4866-97bc-846cd9a311ff tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Updating resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 generation from 126 to 127 during operation: update_inventory {{(pid=62522) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1066.958010] env[62522]: DEBUG nova.compute.provider_tree [None req-22dd138e-3c7e-4866-97bc-846cd9a311ff tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1067.005333] env[62522]: DEBUG oslo_vmware.api [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416059, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.016955] env[62522]: DEBUG nova.compute.manager [req-f631377d-476e-43a6-80d7-84b3f558841b req-889a4731-4f0d-4af6-959e-7db42ebc51d8 service nova] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Received event network-changed-b258477a-b39a-4d17-975a-087d4d6d41bd {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1067.018555] env[62522]: DEBUG nova.compute.manager [req-f631377d-476e-43a6-80d7-84b3f558841b req-889a4731-4f0d-4af6-959e-7db42ebc51d8 service nova] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Refreshing instance network info cache due to event network-changed-b258477a-b39a-4d17-975a-087d4d6d41bd. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1067.018555] env[62522]: DEBUG oslo_concurrency.lockutils [req-f631377d-476e-43a6-80d7-84b3f558841b req-889a4731-4f0d-4af6-959e-7db42ebc51d8 service nova] Acquiring lock "refresh_cache-522e778b-6e01-4554-a3eb-dd1efa7870de" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1067.018555] env[62522]: DEBUG oslo_concurrency.lockutils [req-f631377d-476e-43a6-80d7-84b3f558841b req-889a4731-4f0d-4af6-959e-7db42ebc51d8 service nova] Acquired lock "refresh_cache-522e778b-6e01-4554-a3eb-dd1efa7870de" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.018555] env[62522]: DEBUG nova.network.neutron [req-f631377d-476e-43a6-80d7-84b3f558841b req-889a4731-4f0d-4af6-959e-7db42ebc51d8 service nova] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Refreshing network info cache for port b258477a-b39a-4d17-975a-087d4d6d41bd {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1067.094808] env[62522]: DEBUG oslo_concurrency.lockutils [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1067.320313] env[62522]: DEBUG oslo_vmware.api [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416068, 'name': Destroy_Task, 'duration_secs': 0.494413} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.320708] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Destroyed the VM [ 1067.321019] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Deleting Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1067.321367] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5e8525a1-cd88-400c-94f5-c435f3fec64a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.329105] env[62522]: DEBUG oslo_vmware.api [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1067.329105] env[62522]: value = "task-2416070" [ 1067.329105] env[62522]: _type = "Task" [ 1067.329105] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.339234] env[62522]: DEBUG oslo_vmware.api [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416070, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.392478] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416069, 'name': CreateVM_Task, 'duration_secs': 0.453864} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.392717] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1067.393396] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1067.393854] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.394070] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1067.394337] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b40ad108-c8ce-4a95-a53a-e11d02d15620 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.399775] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1067.399775] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b5895e-3ece-9166-eb0e-226f6d28390a" [ 1067.399775] env[62522]: _type = "Task" [ 1067.399775] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.410434] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b5895e-3ece-9166-eb0e-226f6d28390a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.494952] env[62522]: DEBUG oslo_vmware.api [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416059, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.513346] env[62522]: DEBUG nova.network.neutron [-] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.722385] env[62522]: DEBUG nova.compute.manager [req-fcb497ea-3c81-4420-8557-6c257dcb9e7b req-8d3781e3-c9bd-4c17-9ff4-ea94fd36b332 service nova] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Received event network-vif-deleted-47809969-d413-4587-acbe-3071b4ded420 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1067.757190] env[62522]: DEBUG nova.network.neutron [req-f631377d-476e-43a6-80d7-84b3f558841b req-889a4731-4f0d-4af6-959e-7db42ebc51d8 service nova] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Updated VIF entry in instance network info cache for port b258477a-b39a-4d17-975a-087d4d6d41bd. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1067.757557] env[62522]: DEBUG nova.network.neutron [req-f631377d-476e-43a6-80d7-84b3f558841b req-889a4731-4f0d-4af6-959e-7db42ebc51d8 service nova] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Updating instance_info_cache with network_info: [{"id": "b258477a-b39a-4d17-975a-087d4d6d41bd", "address": "fa:16:3e:eb:3a:30", "network": {"id": "2037da36-cd13-4cb1-95f4-08d1e174336c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1895994075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00b27498c07344d1bf9cecefa0fca033", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb258477a-b3", "ovs_interfaceid": "b258477a-b39a-4d17-975a-087d4d6d41bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.842608] env[62522]: DEBUG oslo_vmware.api [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416070, 'name': RemoveSnapshot_Task} progress is 25%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.912842] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b5895e-3ece-9166-eb0e-226f6d28390a, 'name': SearchDatastore_Task, 'duration_secs': 0.02334} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.912842] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1067.912842] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1067.912842] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1067.912842] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.912842] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1067.912842] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-441deaee-ed5f-4262-84dd-e8b8841c4866 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.925195] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1067.925195] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1067.925195] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae6d0537-37a0-4471-94a8-d7f39a2a5551 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.931813] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1067.931813] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52de9c2e-347d-1d28-3617-f2d8f658bf96" [ 1067.931813] env[62522]: _type = "Task" [ 1067.931813] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.941812] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52de9c2e-347d-1d28-3617-f2d8f658bf96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.975639] env[62522]: DEBUG oslo_concurrency.lockutils [None req-22dd138e-3c7e-4866-97bc-846cd9a311ff tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.474s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.978458] env[62522]: DEBUG oslo_concurrency.lockutils [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.884s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.978678] env[62522]: DEBUG nova.objects.instance [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Lazy-loading 'resources' on Instance uuid 8539afc0-1753-4c37-9fc9-25ec97b97243 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1068.005820] env[62522]: DEBUG oslo_vmware.api [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416059, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.017025] env[62522]: INFO nova.compute.manager [-] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Took 1.35 seconds to deallocate network for instance. 
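
The records above repeatedly show the oslo.vmware task-driving pattern: a SOAP call such as CreateVM_Task or SearchDatastore_Task is invoked, the session logs "Waiting for the task", and _poll_task then reports "progress is N%" until the task reports "completed successfully" with a duration. As a hedged illustration only (placeholder host, credentials, and managed-object reference; not code taken from this log), the loop is driven roughly like this:

```python
# Illustrative sketch of the polling seen in the log above; hostnames,
# credentials and the moref value are placeholders, not values from this run.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    host='vc.example.test',            # placeholder vCenter endpoint
    server_username='administrator',
    server_password='secret',
    api_retry_count=10,
    task_poll_interval=0.5,            # interval between the "progress is N%" polls
)

# invoke_api() issues the vSphere call (Destroy_Task, CreateVM_Task, ...)
# and returns a task moref; wait_for_task() runs the _poll_task loop that
# produces the progress records until the task succeeds or raises.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # placeholder moref
task = session.invoke_api(session.vim, 'Destroy_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)  # 'success' once the task has completed
```
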
[ 1068.260019] env[62522]: DEBUG oslo_concurrency.lockutils [req-f631377d-476e-43a6-80d7-84b3f558841b req-889a4731-4f0d-4af6-959e-7db42ebc51d8 service nova] Releasing lock "refresh_cache-522e778b-6e01-4554-a3eb-dd1efa7870de" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1068.350382] env[62522]: DEBUG oslo_vmware.api [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416070, 'name': RemoveSnapshot_Task, 'duration_secs': 0.613437} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.350734] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Deleted Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1068.350984] env[62522]: INFO nova.compute.manager [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Took 18.15 seconds to snapshot the instance on the hypervisor. [ 1068.446808] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52de9c2e-347d-1d28-3617-f2d8f658bf96, 'name': SearchDatastore_Task, 'duration_secs': 0.015116} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.447759] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65733ca7-ba59-447e-a06b-69c99e9d437c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.455220] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1068.455220] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ba7156-9866-3026-d5cc-566af27d7354" [ 1068.455220] env[62522]: _type = "Task" [ 1068.455220] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.466235] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ba7156-9866-3026-d5cc-566af27d7354, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.505636] env[62522]: DEBUG oslo_vmware.api [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416059, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.527744] env[62522]: DEBUG oslo_concurrency.lockutils [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1068.564673] env[62522]: INFO nova.scheduler.client.report [None req-22dd138e-3c7e-4866-97bc-846cd9a311ff tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Deleted allocation for migration 31f1ffb8-0f4c-4352-bb6b-cf4a60fb08b0 [ 1068.673018] env[62522]: INFO nova.compute.manager [None req-7532f8fc-9214-479c-91f5-21420e6e7f87 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Resuming [ 1068.673717] env[62522]: DEBUG nova.objects.instance [None req-7532f8fc-9214-479c-91f5-21420e6e7f87 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lazy-loading 'flavor' on Instance uuid 04a9d357-d094-487b-8f09-2f7e0c35f0d7 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1068.766472] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-692d49b5-b506-4d83-97e0-49af9836ce0b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.781266] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-608c2185-b7f9-447f-9c95-34325e8e4ed9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.821544] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b87980-33a8-47a4-9710-f9f36b6a991c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.830013] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e21e870-ad65-4190-8ad1-004258c2dd67 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.846266] env[62522]: DEBUG nova.compute.provider_tree [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1068.928118] env[62522]: DEBUG nova.compute.manager [None req-d2bc86f8-5145-4e5b-9b95-46f3b05ce55d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Found 1 images (rotation: 2) {{(pid=62522) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1068.967279] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52ba7156-9866-3026-d5cc-566af27d7354, 'name': SearchDatastore_Task, 'duration_secs': 
0.016312} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.967279] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1068.967279] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 522e778b-6e01-4554-a3eb-dd1efa7870de/522e778b-6e01-4554-a3eb-dd1efa7870de.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1068.967551] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4dcb929d-2571-4579-a043-4c9988c68191 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.975489] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1068.975489] env[62522]: value = "task-2416071" [ 1068.975489] env[62522]: _type = "Task" [ 1068.975489] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.985389] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416071, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.001643] env[62522]: DEBUG oslo_vmware.api [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416059, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.072991] env[62522]: DEBUG oslo_concurrency.lockutils [None req-22dd138e-3c7e-4866-97bc-846cd9a311ff tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "cabe40a0-8bd0-4d77-b949-298bd194fa42" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 11.032s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.351964] env[62522]: DEBUG nova.scheduler.client.report [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1069.490306] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416071, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.505633] env[62522]: DEBUG oslo_vmware.api [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416059, 'name': ReconfigVM_Task, 'duration_secs': 5.807547} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.505919] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1069.506394] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Reconfigured VM to detach interface {{(pid=62522) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1069.791069] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "cabe40a0-8bd0-4d77-b949-298bd194fa42" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1069.791195] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "cabe40a0-8bd0-4d77-b949-298bd194fa42" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1069.791675] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "cabe40a0-8bd0-4d77-b949-298bd194fa42-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1069.791675] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "cabe40a0-8bd0-4d77-b949-298bd194fa42-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1069.791959] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "cabe40a0-8bd0-4d77-b949-298bd194fa42-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.796688] env[62522]: INFO nova.compute.manager [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Terminating instance [ 1069.861749] env[62522]: DEBUG oslo_concurrency.lockutils [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 
tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.883s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.865459] env[62522]: DEBUG oslo_concurrency.lockutils [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.337s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1069.865749] env[62522]: DEBUG nova.objects.instance [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Lazy-loading 'resources' on Instance uuid 548364e9-b19a-4777-8e62-19b8a0594f36 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1069.893300] env[62522]: INFO nova.scheduler.client.report [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Deleted allocations for instance 8539afc0-1753-4c37-9fc9-25ec97b97243 [ 1069.991130] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416071, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.754608} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.991447] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 522e778b-6e01-4554-a3eb-dd1efa7870de/522e778b-6e01-4554-a3eb-dd1efa7870de.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1069.991711] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1069.992018] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-47fbcaf2-3ddd-49e9-b84e-df1eb6dab5df {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.000877] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1070.000877] env[62522]: value = "task-2416072" [ 1070.000877] env[62522]: _type = "Task" [ 1070.000877] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.011647] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416072, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.150023] env[62522]: DEBUG nova.compute.manager [req-26f1c1b2-707f-4b87-86b1-d5161a49a0b1 req-fd6a835b-4595-4a35-8bbf-8cb7baf8cd3f service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Received event network-vif-deleted-0ac91806-75b5-459d-8243-019320a7daf0 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1070.150304] env[62522]: INFO nova.compute.manager [req-26f1c1b2-707f-4b87-86b1-d5161a49a0b1 req-fd6a835b-4595-4a35-8bbf-8cb7baf8cd3f service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Neutron deleted interface 0ac91806-75b5-459d-8243-019320a7daf0; detaching it from the instance and deleting it from the info cache [ 1070.150635] env[62522]: DEBUG nova.network.neutron [req-26f1c1b2-707f-4b87-86b1-d5161a49a0b1 req-fd6a835b-4595-4a35-8bbf-8cb7baf8cd3f service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Updating instance_info_cache with network_info: [{"id": "954fee91-36f2-497a-a856-6828a519a456", "address": "fa:16:3e:df:f4:48", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.136", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap954fee91-36", "ovs_interfaceid": "954fee91-36f2-497a-a856-6828a519a456", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fa421858-7ef8-4e24-94ec-cb1477a79f22", "address": "fa:16:3e:2f:7c:ba", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa421858-7e", "ovs_interfaceid": "fa421858-7ef8-4e24-94ec-cb1477a79f22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1070.185614] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7532f8fc-9214-479c-91f5-21420e6e7f87 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquiring lock "refresh_cache-04a9d357-d094-487b-8f09-2f7e0c35f0d7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1070.187029] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7532f8fc-9214-479c-91f5-21420e6e7f87 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquired lock "refresh_cache-04a9d357-d094-487b-8f09-2f7e0c35f0d7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.187029] env[62522]: DEBUG nova.network.neutron [None req-7532f8fc-9214-479c-91f5-21420e6e7f87 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1070.307022] env[62522]: DEBUG nova.compute.manager [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1070.307022] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1070.307022] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1d7eecf-a436-4383-9a53-aee5b2319e00 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.316339] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1070.316339] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7bf83609-a3d7-48e0-b02e-f8bc11ab904c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.320675] env[62522]: DEBUG nova.compute.manager [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1070.320675] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c78e42-ede1-4a2e-9864-fd13ac0fc9f3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.325459] env[62522]: DEBUG oslo_vmware.api [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1070.325459] env[62522]: value = "task-2416073" [ 1070.325459] env[62522]: _type = "Task" [ 1070.325459] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.340015] env[62522]: DEBUG oslo_vmware.api [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416073, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.404902] env[62522]: DEBUG oslo_concurrency.lockutils [None req-00dd405a-1d38-433d-a2d3-a42fcc2a3e38 tempest-ServersTestManualDisk-1600812027 tempest-ServersTestManualDisk-1600812027-project-member] Lock "8539afc0-1753-4c37-9fc9-25ec97b97243" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.038s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1070.523688] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416072, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072988} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.525104] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1070.526316] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a827f8-2e3c-4e3c-9e06-f8979de8557b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.558059] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 522e778b-6e01-4554-a3eb-dd1efa7870de/522e778b-6e01-4554-a3eb-dd1efa7870de.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1070.561614] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f48df34-34c6-4526-bd5e-3c9ec46b3375 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.584544] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1070.584544] env[62522]: value = "task-2416074" [ 1070.584544] env[62522]: _type = "Task" [ 1070.584544] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.598372] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416074, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.653758] env[62522]: DEBUG oslo_concurrency.lockutils [req-26f1c1b2-707f-4b87-86b1-d5161a49a0b1 req-fd6a835b-4595-4a35-8bbf-8cb7baf8cd3f service nova] Acquiring lock "fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1070.653932] env[62522]: DEBUG oslo_concurrency.lockutils [req-26f1c1b2-707f-4b87-86b1-d5161a49a0b1 req-fd6a835b-4595-4a35-8bbf-8cb7baf8cd3f service nova] Acquired lock "fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.655820] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54296367-11bf-4d58-9170-b129154f2c20 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.679629] env[62522]: DEBUG oslo_concurrency.lockutils [req-26f1c1b2-707f-4b87-86b1-d5161a49a0b1 req-fd6a835b-4595-4a35-8bbf-8cb7baf8cd3f service nova] Releasing lock "fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1070.679629] env[62522]: WARNING nova.compute.manager [req-26f1c1b2-707f-4b87-86b1-d5161a49a0b1 req-fd6a835b-4595-4a35-8bbf-8cb7baf8cd3f service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Detach interface failed, port_id=0ac91806-75b5-459d-8243-019320a7daf0, reason: No device with interface-id 0ac91806-75b5-459d-8243-019320a7daf0 exists on VM: nova.exception.NotFound: No device with interface-id 0ac91806-75b5-459d-8243-019320a7daf0 exists on VM [ 1070.702303] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b93c54-29d1-4622-a41c-b74bb017daae {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.710677] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7974d0e1-d8fa-4738-ae1a-24175dcb504c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.747821] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a195e0c3-5e42-4bec-b435-934acc1e0288 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.758724] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0e14e2-a9b8-44e6-9d86-5ee0054b7ab7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.775670] env[62522]: DEBUG nova.compute.provider_tree [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1070.804369] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "9337449d-5aff-4170-83ea-42fe2e9d1657" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1070.804821] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "9337449d-5aff-4170-83ea-42fe2e9d1657" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1070.835198] env[62522]: INFO nova.compute.manager [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] instance snapshotting [ 1070.835889] env[62522]: DEBUG nova.objects.instance [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lazy-loading 'flavor' on Instance uuid 7f8a8270-5014-446c-aa42-ea0b4079e5a9 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1070.843220] env[62522]: DEBUG oslo_vmware.api [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416073, 'name': PowerOffVM_Task, 'duration_secs': 0.270542} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.843704] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1070.843971] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1070.844331] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eb59c476-b3f2-48c9-bf8b-64204fdae3ae {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.923097] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1070.923097] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1070.923097] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Deleting the datastore file [datastore2] cabe40a0-8bd0-4d77-b949-298bd194fa42 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1070.923097] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-052d68ac-2823-44ab-ab84-abf4b1885a1d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.931086] env[62522]: DEBUG oslo_vmware.api [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1070.931086] env[62522]: value = "task-2416076" [ 1070.931086] env[62522]: _type = "Task" [ 1070.931086] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.942491] env[62522]: DEBUG oslo_vmware.api [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416076, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.978453] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "refresh_cache-fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1070.978784] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "refresh_cache-fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.979147] env[62522]: DEBUG nova.network.neutron [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1071.098869] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416074, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.195078] env[62522]: DEBUG nova.network.neutron [None req-7532f8fc-9214-479c-91f5-21420e6e7f87 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Updating instance_info_cache with network_info: [{"id": "7e36641e-fc4a-4223-ab07-33dc49821168", "address": "fa:16:3e:f1:bf:49", "network": {"id": "b837f0fb-c2e1-46dd-93b2-62d6c4352316", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1813744063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed393a0454b643eea75c203d1dfd592c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e36641e-fc", "ovs_interfaceid": "7e36641e-fc4a-4223-ab07-33dc49821168", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.301293] env[62522]: ERROR nova.scheduler.client.report [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] [req-6db767de-346a-4513-b667-83af343c98bb] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 
4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c7fa38b2-245d-4337-a012-22c1a01c0a72. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6db767de-346a-4513-b667-83af343c98bb"}]} [ 1071.308452] env[62522]: DEBUG nova.compute.manager [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1071.329992] env[62522]: DEBUG nova.scheduler.client.report [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Refreshing inventories for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1071.349089] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a13eb1-5d16-42a4-910f-62791ba752f1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.384495] env[62522]: DEBUG nova.scheduler.client.report [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Updating ProviderTree inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1071.385050] env[62522]: DEBUG nova.compute.provider_tree [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1071.388246] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cacd0253-d8aa-47e1-94a3-57270f1b63dc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.406617] env[62522]: DEBUG nova.scheduler.client.report [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 
tempest-ServerRescueTestJSON-123201904-project-member] Refreshing aggregate associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, aggregates: None {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1071.442582] env[62522]: DEBUG oslo_vmware.api [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416076, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.224361} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.446082] env[62522]: DEBUG nova.scheduler.client.report [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Refreshing trait associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1071.446082] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1071.446231] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1071.446411] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1071.446624] env[62522]: INFO nova.compute.manager [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1071.446892] env[62522]: DEBUG oslo.service.loopingcall [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1071.447338] env[62522]: DEBUG nova.compute.manager [-] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1071.447435] env[62522]: DEBUG nova.network.neutron [-] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1071.570921] env[62522]: DEBUG oslo_concurrency.lockutils [None req-22d47536-309b-4a82-ab94-b0b3f889201d tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "bf44e269-0297-473e-b6ce-04a40d0ec1b4" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.571517] env[62522]: DEBUG oslo_concurrency.lockutils [None req-22d47536-309b-4a82-ab94-b0b3f889201d tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "bf44e269-0297-473e-b6ce-04a40d0ec1b4" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.599965] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416074, 'name': ReconfigVM_Task, 'duration_secs': 0.54595} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.600954] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 522e778b-6e01-4554-a3eb-dd1efa7870de/522e778b-6e01-4554-a3eb-dd1efa7870de.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1071.602468] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8b4d0c27-f970-4b6b-9874-66e857241a8b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.615868] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1071.615868] env[62522]: value = "task-2416077" [ 1071.615868] env[62522]: _type = "Task" [ 1071.615868] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.630365] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416077, 'name': Rename_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.699424] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7532f8fc-9214-479c-91f5-21420e6e7f87 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Releasing lock "refresh_cache-04a9d357-d094-487b-8f09-2f7e0c35f0d7" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1071.699424] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0909ac3d-6285-4c23-9d48-7e44b59bbbcf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.712702] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7532f8fc-9214-479c-91f5-21420e6e7f87 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Resuming the VM {{(pid=62522) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1071.719140] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3dbb49c8-764f-4359-a480-4e14a09b6369 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.730359] env[62522]: DEBUG oslo_vmware.api [None req-7532f8fc-9214-479c-91f5-21420e6e7f87 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 1071.730359] env[62522]: value = "task-2416078" [ 1071.730359] env[62522]: _type = "Task" [ 1071.730359] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.740683] env[62522]: DEBUG oslo_vmware.api [None req-7532f8fc-9214-479c-91f5-21420e6e7f87 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416078, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.772647] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e069b4fe-6987-4eee-865e-84beffdaa31c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.781363] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d14982-ed6d-45d5-854e-089a647cf949 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.826232] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-253875e3-c8cd-4866-9ee7-f98554f634f2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.836486] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe45e94f-6216-4d26-ac09-f764d2d6a369 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.845280] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.859200] env[62522]: DEBUG nova.compute.provider_tree [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1071.902887] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Creating Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1071.903340] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3a90cb4d-46fc-4c75-9ed8-49d8eba4de4f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.912613] env[62522]: DEBUG oslo_vmware.api [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1071.912613] env[62522]: value = "task-2416079" [ 1071.912613] env[62522]: _type = "Task" [ 1071.912613] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.924200] env[62522]: DEBUG oslo_vmware.api [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416079, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.047950] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "fcd0eef6-d059-4495-a982-058b6c9626d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1072.047950] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "fcd0eef6-d059-4495-a982-058b6c9626d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.047950] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "fcd0eef6-d059-4495-a982-058b6c9626d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1072.047950] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "fcd0eef6-d059-4495-a982-058b6c9626d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.047950] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "fcd0eef6-d059-4495-a982-058b6c9626d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.051291] env[62522]: INFO nova.compute.manager [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Terminating instance [ 1072.078949] env[62522]: INFO nova.compute.manager [None req-22d47536-309b-4a82-ab94-b0b3f889201d tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Detaching volume 56b832bf-8626-456e-9706-070e3adf329d [ 1072.133173] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416077, 'name': Rename_Task, 'duration_secs': 0.192137} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.135742] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1072.135742] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ebb6babb-93d0-482a-afa4-3c07b426d67a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.143039] env[62522]: INFO nova.virt.block_device [None req-22d47536-309b-4a82-ab94-b0b3f889201d tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Attempting to driver detach volume 56b832bf-8626-456e-9706-070e3adf329d from mountpoint /dev/sdb [ 1072.143941] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-22d47536-309b-4a82-ab94-b0b3f889201d tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Volume detach. Driver type: vmdk {{(pid=62522) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1072.143941] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-22d47536-309b-4a82-ab94-b0b3f889201d tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489789', 'volume_id': '56b832bf-8626-456e-9706-070e3adf329d', 'name': 'volume-56b832bf-8626-456e-9706-070e3adf329d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'bf44e269-0297-473e-b6ce-04a40d0ec1b4', 'attached_at': '', 'detached_at': '', 'volume_id': '56b832bf-8626-456e-9706-070e3adf329d', 'serial': '56b832bf-8626-456e-9706-070e3adf329d'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1072.144856] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f992dab-02ee-42da-84ba-758345c51443 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.149565] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1072.149565] env[62522]: value = "task-2416080" [ 1072.149565] env[62522]: _type = "Task" [ 1072.149565] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.186571] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be9750ce-d78e-48a7-835a-42ddcd9fd446 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.196340] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416080, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.200880] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-787bcf01-a4ed-490d-a3d8-91eff53a0a7d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.227405] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679594c8-f338-4edc-b30b-ad34338711a4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.241490] env[62522]: DEBUG oslo_vmware.api [None req-7532f8fc-9214-479c-91f5-21420e6e7f87 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416078, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.253164] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-22d47536-309b-4a82-ab94-b0b3f889201d tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] The volume has not been displaced from its original location: [datastore1] volume-56b832bf-8626-456e-9706-070e3adf329d/volume-56b832bf-8626-456e-9706-070e3adf329d.vmdk. No consolidation needed. 
{{(pid=62522) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1072.259329] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-22d47536-309b-4a82-ab94-b0b3f889201d tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Reconfiguring VM instance instance-00000033 to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1072.264027] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abfadc8c-04d5-49cb-97a2-20a0c078c653 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.280265] env[62522]: DEBUG nova.compute.manager [req-e6fb195c-14d7-4917-909c-8bf7730eb8d6 req-96b22492-0e44-4e96-9db0-d2c500da2b5f service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Received event network-vif-deleted-fa421858-7ef8-4e24-94ec-cb1477a79f22 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1072.280265] env[62522]: INFO nova.compute.manager [req-e6fb195c-14d7-4917-909c-8bf7730eb8d6 req-96b22492-0e44-4e96-9db0-d2c500da2b5f service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Neutron deleted interface fa421858-7ef8-4e24-94ec-cb1477a79f22; detaching it from the instance and deleting it from the info cache [ 1072.280644] env[62522]: DEBUG nova.network.neutron [req-e6fb195c-14d7-4917-909c-8bf7730eb8d6 req-96b22492-0e44-4e96-9db0-d2c500da2b5f service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Updating instance_info_cache with network_info: [{"id": "954fee91-36f2-497a-a856-6828a519a456", "address": "fa:16:3e:df:f4:48", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.136", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap954fee91-36", "ovs_interfaceid": "954fee91-36f2-497a-a856-6828a519a456", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.294036] env[62522]: DEBUG oslo_vmware.api [None req-22d47536-309b-4a82-ab94-b0b3f889201d tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 1072.294036] env[62522]: value = "task-2416081" [ 1072.294036] env[62522]: _type = "Task" [ 1072.294036] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.309847] env[62522]: DEBUG oslo_vmware.api [None req-22d47536-309b-4a82-ab94-b0b3f889201d tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2416081, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.362616] env[62522]: DEBUG nova.scheduler.client.report [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1072.425764] env[62522]: DEBUG oslo_vmware.api [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416079, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.504163] env[62522]: DEBUG nova.network.neutron [-] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.533761] env[62522]: INFO nova.network.neutron [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Port fa421858-7ef8-4e24-94ec-cb1477a79f22 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
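The oslo.vmware task records above follow a fixed wording: wait_for_task logs the pending task id, _poll_task reports progress, and the completion line carries a duration_secs field (for example task-2416076, DeleteDatastoreFile_Task, 0.224s). A minimal stand-alone sketch for summarizing those completions from a log like this one, assuming only the "Task: {'id': ..., 'name': ..., 'duration_secs': ...} completed successfully" wording visible in these lines; the file name, regex and the 1.0s "slow" cutoff are illustrative choices, not anything defined by Nova or oslo.vmware:

#!/usr/bin/env python3
"""Summarize completed oslo.vmware tasks from a nova-compute log.

Matches completion records of the form seen above, e.g.
  Task: {'id': task-2416076, 'name': DeleteDatastoreFile_Task,
         'duration_secs': 0.224361} completed successfully.
"""
import re
import sys

# 'duration_secs' is optional because some completion lines omit it.
TASK_DONE = re.compile(
    r"Task: \{'id': (?P<id>task-\d+), 'name': (?P<name>\w+)"
    r"(?:, 'duration_secs': (?P<secs>[\d.]+))?\} completed successfully"
)

def main(path, slow=1.0):
    tasks = []
    with open(path, encoding="utf-8", errors="replace") as fh:
        for line in fh:
            m = TASK_DONE.search(line)
            if m:
                secs = float(m.group("secs") or 0.0)
                tasks.append((secs, m.group("id"), m.group("name")))
    # Print the slowest tasks first; 'slow' is just a reporting cutoff.
    for secs, task_id, name in sorted(tasks, reverse=True):
        marker = "SLOW " if secs >= slow else ""
        print(f"{marker}{task_id} {name} {secs:.3f}s")

if __name__ == "__main__":
    main(sys.argv[1] if len(sys.argv) > 1 else "nova-compute.log")

Run against this log it would list, among others, the ReconfigVM_Task (0.546s), CloneVM_Task and PowerOnVM_Task completions recorded in the surrounding lines.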
[ 1072.534467] env[62522]: DEBUG nova.network.neutron [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Updating instance_info_cache with network_info: [{"id": "954fee91-36f2-497a-a856-6828a519a456", "address": "fa:16:3e:df:f4:48", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.136", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap954fee91-36", "ovs_interfaceid": "954fee91-36f2-497a-a856-6828a519a456", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.556579] env[62522]: DEBUG nova.compute.manager [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1072.556772] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1072.558568] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb64d373-75be-419a-bcc6-ec5a99883900 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.571775] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1072.572113] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ff28763-f730-4de4-9827-6608afa2ff6b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.587256] env[62522]: DEBUG oslo_vmware.api [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1072.587256] env[62522]: value = "task-2416082" [ 1072.587256] env[62522]: _type = "Task" [ 1072.587256] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.597869] env[62522]: DEBUG oslo_vmware.api [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416082, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.667757] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416080, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.741556] env[62522]: DEBUG oslo_vmware.api [None req-7532f8fc-9214-479c-91f5-21420e6e7f87 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416078, 'name': PowerOnVM_Task, 'duration_secs': 0.604948} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.741956] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7532f8fc-9214-479c-91f5-21420e6e7f87 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Resumed the VM {{(pid=62522) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1072.742133] env[62522]: DEBUG nova.compute.manager [None req-7532f8fc-9214-479c-91f5-21420e6e7f87 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1072.742987] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6a5b1c-6195-465c-849c-f3829ffcad1c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.787044] env[62522]: DEBUG oslo_concurrency.lockutils [req-e6fb195c-14d7-4917-909c-8bf7730eb8d6 req-96b22492-0e44-4e96-9db0-d2c500da2b5f service nova] Acquiring lock "fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1072.806561] env[62522]: DEBUG oslo_vmware.api [None req-22d47536-309b-4a82-ab94-b0b3f889201d tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2416081, 'name': ReconfigVM_Task, 'duration_secs': 0.337075} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.807087] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-22d47536-309b-4a82-ab94-b0b3f889201d tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Reconfigured VM instance instance-00000033 to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1072.812537] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6caeae14-f43d-43b6-b268-e16fd33b3559 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.833545] env[62522]: DEBUG oslo_vmware.api [None req-22d47536-309b-4a82-ab94-b0b3f889201d tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 1072.833545] env[62522]: value = "task-2416083" [ 1072.833545] env[62522]: _type = "Task" [ 1072.833545] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.845316] env[62522]: DEBUG oslo_vmware.api [None req-22d47536-309b-4a82-ab94-b0b3f889201d tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2416083, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.867518] env[62522]: DEBUG oslo_concurrency.lockutils [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.003s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.875536] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.031s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.877369] env[62522]: INFO nova.compute.claims [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1072.919636] env[62522]: INFO nova.scheduler.client.report [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Deleted allocations for instance 548364e9-b19a-4777-8e62-19b8a0594f36 [ 1072.929091] env[62522]: DEBUG oslo_vmware.api [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416079, 'name': CreateSnapshot_Task, 'duration_secs': 0.986408} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.932732] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Created Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1072.932732] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567f2d85-965e-439d-82d9-cf5740558a10 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.008810] env[62522]: INFO nova.compute.manager [-] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Took 1.56 seconds to deallocate network for instance. [ 1073.040803] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "refresh_cache-fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1073.102508] env[62522]: DEBUG oslo_vmware.api [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416082, 'name': PowerOffVM_Task, 'duration_secs': 0.328092} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.102508] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1073.102508] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1073.102508] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a077b841-ca8a-45ad-9ca5-1184750a55ff {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.164181] env[62522]: DEBUG oslo_vmware.api [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416080, 'name': PowerOnVM_Task, 'duration_secs': 0.786914} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.168016] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1073.168016] env[62522]: INFO nova.compute.manager [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Took 9.46 seconds to spawn the instance on the hypervisor. 
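The lockutils lines in this stretch expose contention just as directly: each acquire record reports how long the caller waited and each release record how long the lock was held (the "compute_resources" lock above was waited on for 1.031s and held for 3.003s). A small companion sketch, again assuming only the acquire/release wording shown in these records; min_secs and the default file name are made-up parameters for the example:

import re
import sys
from collections import defaultdict

# Wording taken from the oslo_concurrency.lockutils lines in this log.
ACQUIRED = re.compile(
    r'Lock "(?P<name>[^"]+)" acquired by "[^"]+" :: waited (?P<secs>[\d.]+)s')
RELEASED = re.compile(
    r'Lock "(?P<name>[^"]+)" "released" by "[^"]+" :: held (?P<secs>[\d.]+)s')

def summarize(path, min_secs=1.0):
    # Track the worst observed wait and hold time per lock name.
    waits = defaultdict(float)
    holds = defaultdict(float)
    with open(path, encoding="utf-8", errors="replace") as fh:
        for line in fh:
            if (m := ACQUIRED.search(line)):
                name = m.group("name")
                waits[name] = max(waits[name], float(m.group("secs")))
            elif (m := RELEASED.search(line)):
                name = m.group("name")
                holds[name] = max(holds[name], float(m.group("secs")))
    names = sorted(set(waits) | set(holds),
                   key=lambda n: -(waits.get(n, 0.0) + holds.get(n, 0.0)))
    for name in names:
        if max(waits.get(name, 0.0), holds.get(name, 0.0)) >= min_secs:
            print(f"{name}: max waited {waits.get(name, 0.0):.3f}s, "
                  f"max held {holds.get(name, 0.0):.3f}s")

if __name__ == "__main__":
    summarize(sys.argv[1] if len(sys.argv) > 1 else "nova-compute.log")

On a section like this one it would surface "compute_resources" and the per-instance UUID locks as the longest-held, which is the usual first clue when build or terminate requests queue up behind the resource tracker.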
[ 1073.168016] env[62522]: DEBUG nova.compute.manager [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1073.168016] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4d97152-062a-443b-a396-960eed6621ec {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.227836] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1073.231021] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1073.231021] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Deleting the datastore file [datastore2] fcd0eef6-d059-4495-a982-058b6c9626d1 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1073.231021] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d296e162-a55d-4caa-8d9b-cb238c2f3b70 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.239035] env[62522]: DEBUG oslo_vmware.api [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1073.239035] env[62522]: value = "task-2416085" [ 1073.239035] env[62522]: _type = "Task" [ 1073.239035] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.250289] env[62522]: DEBUG oslo_vmware.api [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416085, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.345600] env[62522]: DEBUG oslo_vmware.api [None req-22d47536-309b-4a82-ab94-b0b3f889201d tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2416083, 'name': ReconfigVM_Task, 'duration_secs': 0.178799} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.345925] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-22d47536-309b-4a82-ab94-b0b3f889201d tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489789', 'volume_id': '56b832bf-8626-456e-9706-070e3adf329d', 'name': 'volume-56b832bf-8626-456e-9706-070e3adf329d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'bf44e269-0297-473e-b6ce-04a40d0ec1b4', 'attached_at': '', 'detached_at': '', 'volume_id': '56b832bf-8626-456e-9706-070e3adf329d', 'serial': '56b832bf-8626-456e-9706-070e3adf329d'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1073.436010] env[62522]: DEBUG oslo_concurrency.lockutils [None req-13465fe3-0af4-4b1d-b6ed-226e8ab74c8e tempest-ServerRescueTestJSON-123201904 tempest-ServerRescueTestJSON-123201904-project-member] Lock "548364e9-b19a-4777-8e62-19b8a0594f36" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.478s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.453689] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Creating linked-clone VM from snapshot {{(pid=62522) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1073.454135] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a54310c0-1ff0-4bc8-afe8-0d1a1fb92ddf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.465112] env[62522]: DEBUG oslo_vmware.api [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1073.465112] env[62522]: value = "task-2416086" [ 1073.465112] env[62522]: _type = "Task" [ 1073.465112] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.532374] env[62522]: DEBUG oslo_vmware.api [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416086, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.532374] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.546459] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70e208c0-956a-429a-8bb4-176103810931 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "interface-fcd0eef6-d059-4495-a982-058b6c9626d1-0ac91806-75b5-459d-8243-019320a7daf0" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.652s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.694538] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Acquiring lock "35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.694658] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Lock "35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.694863] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Acquiring lock "35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.695064] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Lock "35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.695240] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Lock "35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.698174] env[62522]: INFO nova.compute.manager [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 
tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Took 15.04 seconds to build instance. [ 1073.702840] env[62522]: INFO nova.compute.manager [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Terminating instance [ 1073.705444] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Acquiring lock "783d9ae7-67f5-4c54-81a7-6715b762afb3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.705444] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Lock "783d9ae7-67f5-4c54-81a7-6715b762afb3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.756188] env[62522]: DEBUG oslo_vmware.api [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416085, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.354442} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.756188] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1073.756188] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1073.756465] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1073.756642] env[62522]: INFO nova.compute.manager [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1073.759836] env[62522]: DEBUG oslo.service.loopingcall [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1073.759836] env[62522]: DEBUG nova.compute.manager [-] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1073.759836] env[62522]: DEBUG nova.network.neutron [-] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1073.908197] env[62522]: DEBUG nova.objects.instance [None req-22d47536-309b-4a82-ab94-b0b3f889201d tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lazy-loading 'flavor' on Instance uuid bf44e269-0297-473e-b6ce-04a40d0ec1b4 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1073.916800] env[62522]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port fa421858-7ef8-4e24-94ec-cb1477a79f22 could not be found.", "detail": ""}} {{(pid=62522) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1073.916800] env[62522]: DEBUG nova.network.neutron [-] Unable to show port fa421858-7ef8-4e24-94ec-cb1477a79f22 as it no longer exists. {{(pid=62522) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 1073.980532] env[62522]: DEBUG oslo_vmware.api [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416086, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.177953] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-903e6ef9-5e7a-40ef-9d4c-52bcb86d6570 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.189093] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc3d55a-5d56-4bcb-99a8-ae722f242c0d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.229135] env[62522]: DEBUG oslo_concurrency.lockutils [None req-fce7f2a7-97c4-4294-abda-3487ec0a6d32 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "522e778b-6e01-4554-a3eb-dd1efa7870de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.580s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.229639] env[62522]: DEBUG nova.compute.manager [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1074.234311] env[62522]: DEBUG nova.compute.manager [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1074.234648] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1074.235769] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26240414-233b-40ef-9afa-427e713d566d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.238235] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37e2478-6a3f-49c2-a6d1-ceeba9e62e37 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.252452] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c93b513e-541a-41e6-9cb6-654429258163 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.255242] env[62522]: DEBUG oslo_vmware.api [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Waiting for the task: (returnval){ [ 1074.255242] env[62522]: value = "task-2416087" [ 1074.255242] env[62522]: _type = "Task" [ 1074.255242] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.272021] env[62522]: DEBUG nova.compute.provider_tree [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1074.284016] env[62522]: DEBUG oslo_vmware.api [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2416087, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.482914] env[62522]: DEBUG oslo_vmware.api [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416086, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.570743] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529928a2-3869-7c6c-048a-d03aeeebcc4c/disk-0.vmdk. 
{{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1074.571736] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e241ecc-0662-40d2-972b-5141a4d7f1c4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.586907] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529928a2-3869-7c6c-048a-d03aeeebcc4c/disk-0.vmdk is in state: ready. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1074.587106] env[62522]: ERROR oslo_vmware.rw_handles [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529928a2-3869-7c6c-048a-d03aeeebcc4c/disk-0.vmdk due to incomplete transfer. [ 1074.587373] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-bc3077ed-b426-4cba-b728-843758f6fce4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.597445] env[62522]: DEBUG oslo_vmware.rw_handles [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529928a2-3869-7c6c-048a-d03aeeebcc4c/disk-0.vmdk. {{(pid=62522) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1074.597640] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Uploaded image 61bbb676-eb4a-448b-9d8a-abdf1c9af6ab to the Glance image server {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1074.599652] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Destroying the VM {{(pid=62522) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1074.599968] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8f3f473b-a6de-4484-a5fd-cc5b163cd23e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.608960] env[62522]: DEBUG oslo_vmware.api [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1074.608960] env[62522]: value = "task-2416088" [ 1074.608960] env[62522]: _type = "Task" [ 1074.608960] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.618778] env[62522]: DEBUG oslo_vmware.api [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416088, 'name': Destroy_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.770778] env[62522]: DEBUG oslo_vmware.api [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2416087, 'name': PowerOffVM_Task, 'duration_secs': 0.42642} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.772198] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.772592] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1074.772984] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Volume detach. 
Driver type: vmdk {{(pid=62522) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1074.773303] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489690', 'volume_id': '5a72a163-f70c-478d-aff3-2a748c2d25d5', 'name': 'volume-5a72a163-f70c-478d-aff3-2a748c2d25d5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6', 'attached_at': '', 'detached_at': '', 'volume_id': '5a72a163-f70c-478d-aff3-2a748c2d25d5', 'serial': '5a72a163-f70c-478d-aff3-2a748c2d25d5'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1074.774306] env[62522]: DEBUG nova.scheduler.client.report [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1074.778890] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0df5f0a-fbf8-4a80-b1d9-5ce74b538058 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.805126] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4d901a-2f7c-420d-be33-24bf4a3b393f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.811039] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-174f330b-4190-462c-8abc-bf6a7b6332dc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.835397] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6e7ea3-1a6b-4164-bb97-80a77cb7ace4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.857066] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] The volume has not been displaced from its original location: [datastore2] volume-5a72a163-f70c-478d-aff3-2a748c2d25d5/volume-5a72a163-f70c-478d-aff3-2a748c2d25d5.vmdk. No consolidation needed. 
{{(pid=62522) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1074.863801] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Reconfiguring VM instance instance-0000003d to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1074.865116] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af91cbaa-11e7-4ca2-a60e-08446cb985a7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.881672] env[62522]: DEBUG nova.compute.manager [req-5365dc77-5998-4b11-8bfc-4c7cea767b99 req-5a0e2d8c-5d31-4e0c-bb42-27a407b3fc46 service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Received event network-vif-deleted-954fee91-36f2-497a-a856-6828a519a456 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1074.881890] env[62522]: INFO nova.compute.manager [req-5365dc77-5998-4b11-8bfc-4c7cea767b99 req-5a0e2d8c-5d31-4e0c-bb42-27a407b3fc46 service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Neutron deleted interface 954fee91-36f2-497a-a856-6828a519a456; detaching it from the instance and deleting it from the info cache [ 1074.882128] env[62522]: DEBUG nova.network.neutron [req-5365dc77-5998-4b11-8bfc-4c7cea767b99 req-5a0e2d8c-5d31-4e0c-bb42-27a407b3fc46 service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1074.886596] env[62522]: DEBUG oslo_vmware.api [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Waiting for the task: (returnval){ [ 1074.886596] env[62522]: value = "task-2416089" [ 1074.886596] env[62522]: _type = "Task" [ 1074.886596] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.901830] env[62522]: DEBUG oslo_vmware.api [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2416089, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.915676] env[62522]: DEBUG oslo_concurrency.lockutils [None req-22d47536-309b-4a82-ab94-b0b3f889201d tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "bf44e269-0297-473e-b6ce-04a40d0ec1b4" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.344s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.983766] env[62522]: DEBUG oslo_vmware.api [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416086, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.123109] env[62522]: DEBUG oslo_vmware.api [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416088, 'name': Destroy_Task, 'duration_secs': 0.37466} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.123428] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Destroyed the VM [ 1075.125401] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Deleting Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1075.125401] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-34028fb5-f858-4d86-a665-690f9b842994 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.139317] env[62522]: DEBUG oslo_vmware.api [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1075.139317] env[62522]: value = "task-2416090" [ 1075.139317] env[62522]: _type = "Task" [ 1075.139317] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.151633] env[62522]: DEBUG oslo_vmware.api [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416090, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.284040] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.409s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.284809] env[62522]: DEBUG nova.compute.manager [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1075.290717] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.771s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1075.290983] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.293174] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.521s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1075.295273] env[62522]: INFO nova.compute.claims [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1075.331116] env[62522]: INFO nova.scheduler.client.report [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Deleted allocations for instance cabe40a0-8bd0-4d77-b949-298bd194fa42 [ 1075.343771] env[62522]: DEBUG nova.network.neutron [-] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.385768] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8afaed29-022f-4365-a429-ac5b9db42875 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.397962] env[62522]: DEBUG oslo_vmware.api [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2416089, 'name': ReconfigVM_Task, 'duration_secs': 0.328011} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.399754] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Reconfigured VM instance instance-0000003d to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1075.406726] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1cfd43b1-b65b-4cce-b9ca-06e7fdffc844 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.421760] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b8c7810-fa82-4e77-beef-be3202f05f65 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.446835] env[62522]: DEBUG oslo_vmware.api [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Waiting for the task: (returnval){ [ 1075.446835] env[62522]: value = "task-2416091" [ 1075.446835] env[62522]: _type = "Task" [ 1075.446835] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.460988] env[62522]: DEBUG oslo_vmware.api [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2416091, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.487095] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "bf44e269-0297-473e-b6ce-04a40d0ec1b4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1075.487095] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "bf44e269-0297-473e-b6ce-04a40d0ec1b4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1075.487095] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "bf44e269-0297-473e-b6ce-04a40d0ec1b4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1075.487095] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "bf44e269-0297-473e-b6ce-04a40d0ec1b4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1075.487485] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "bf44e269-0297-473e-b6ce-04a40d0ec1b4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.489521] env[62522]: DEBUG nova.compute.manager [req-5365dc77-5998-4b11-8bfc-4c7cea767b99 req-5a0e2d8c-5d31-4e0c-bb42-27a407b3fc46 service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Detach interface failed, port_id=954fee91-36f2-497a-a856-6828a519a456, reason: Instance fcd0eef6-d059-4495-a982-058b6c9626d1 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1075.492015] env[62522]: INFO nova.compute.manager [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Terminating instance [ 1075.505334] env[62522]: DEBUG oslo_vmware.api [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416086, 'name': CloneVM_Task, 'duration_secs': 1.693011} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.505644] env[62522]: INFO nova.virt.vmwareapi.vmops [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Created linked-clone VM from snapshot [ 1075.506497] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a7541e-29be-4071-bfd1-05e5c494cef6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.524920] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Uploading image 4b03bdf9-70a0-4803-beb1-cbcd84dc8ac1 {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1075.566536] env[62522]: DEBUG oslo_vmware.rw_handles [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1075.566536] env[62522]: value = "vm-489809" [ 1075.566536] env[62522]: _type = "VirtualMachine" [ 1075.566536] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1075.566850] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-1cdf7343-3a7a-4c01-b981-1f6f913aefd5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.586959] env[62522]: DEBUG oslo_vmware.rw_handles [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lease: (returnval){ [ 1075.586959] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52834aad-47cf-7feb-1d23-99a46c058dca" [ 1075.586959] env[62522]: _type = "HttpNfcLease" [ 1075.586959] env[62522]: } obtained for exporting VM: (result){ [ 1075.586959] env[62522]: value = "vm-489809" [ 1075.586959] env[62522]: _type = "VirtualMachine" [ 1075.586959] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1075.587288] env[62522]: DEBUG oslo_vmware.api [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the lease: (returnval){ [ 1075.587288] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52834aad-47cf-7feb-1d23-99a46c058dca" [ 1075.587288] env[62522]: _type = "HttpNfcLease" [ 1075.587288] env[62522]: } to be ready. {{(pid=62522) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1075.601060] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1075.601060] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52834aad-47cf-7feb-1d23-99a46c058dca" [ 1075.601060] env[62522]: _type = "HttpNfcLease" [ 1075.601060] env[62522]: } is initializing. 
{{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1075.650167] env[62522]: DEBUG oslo_vmware.api [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416090, 'name': RemoveSnapshot_Task, 'duration_secs': 0.415664} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.650870] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Deleted Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1075.650870] env[62522]: DEBUG nova.compute.manager [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1075.651458] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1577036-ebd9-4359-b2a4-b5147f6d1746 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.799659] env[62522]: DEBUG nova.compute.utils [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1075.806015] env[62522]: DEBUG nova.compute.manager [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1075.806101] env[62522]: DEBUG nova.network.neutron [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1075.839117] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c0840bc0-0684-45db-bc38-154ec9529bb8 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "cabe40a0-8bd0-4d77-b949-298bd194fa42" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.048s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.845771] env[62522]: INFO nova.compute.manager [-] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Took 2.09 seconds to deallocate network for instance. 
[ 1075.923209] env[62522]: DEBUG nova.policy [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0b81d399f06a47bc819693b52bb74004', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ff5da278d2be4ca983424c8291beadec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1075.942081] env[62522]: DEBUG oslo_concurrency.lockutils [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "522e778b-6e01-4554-a3eb-dd1efa7870de" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1075.942780] env[62522]: DEBUG oslo_concurrency.lockutils [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "522e778b-6e01-4554-a3eb-dd1efa7870de" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1075.942780] env[62522]: DEBUG oslo_concurrency.lockutils [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "522e778b-6e01-4554-a3eb-dd1efa7870de-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1075.942780] env[62522]: DEBUG oslo_concurrency.lockutils [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "522e778b-6e01-4554-a3eb-dd1efa7870de-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1075.943080] env[62522]: DEBUG oslo_concurrency.lockutils [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "522e778b-6e01-4554-a3eb-dd1efa7870de-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.945091] env[62522]: INFO nova.compute.manager [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Terminating instance [ 1075.958362] env[62522]: DEBUG oslo_vmware.api [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2416091, 'name': ReconfigVM_Task, 'duration_secs': 0.175462} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.958942] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489690', 'volume_id': '5a72a163-f70c-478d-aff3-2a748c2d25d5', 'name': 'volume-5a72a163-f70c-478d-aff3-2a748c2d25d5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6', 'attached_at': '', 'detached_at': '', 'volume_id': '5a72a163-f70c-478d-aff3-2a748c2d25d5', 'serial': '5a72a163-f70c-478d-aff3-2a748c2d25d5'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1075.958942] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1075.959745] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9359f27-b2f2-4bce-a8c5-88adb82f3e0e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.967599] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1075.968460] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bb011c87-649e-4878-93b9-80740a74699e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.998382] env[62522]: DEBUG nova.compute.manager [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1075.998622] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1075.999532] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-826305f6-b826-4d28-b99b-2b7fdb1c225c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.008814] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1076.009102] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-87bf005f-f901-43d8-ab22-8dbf5ee587bb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.017420] env[62522]: DEBUG oslo_vmware.api [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 1076.017420] env[62522]: value = "task-2416094" [ 1076.017420] env[62522]: _type = "Task" [ 1076.017420] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.028129] env[62522]: DEBUG oslo_vmware.api [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2416094, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.056402] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1076.056402] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1076.056402] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Deleting the datastore file [datastore2] 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1076.056402] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf3cc847-fc8b-4acb-9800-ce4141d9766e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.063630] env[62522]: DEBUG oslo_vmware.api [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Waiting for the task: (returnval){ [ 1076.063630] env[62522]: value = "task-2416095" [ 1076.063630] env[62522]: _type = "Task" [ 1076.063630] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.074829] env[62522]: DEBUG oslo_vmware.api [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2416095, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.100802] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1076.100802] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52834aad-47cf-7feb-1d23-99a46c058dca" [ 1076.100802] env[62522]: _type = "HttpNfcLease" [ 1076.100802] env[62522]: } is ready. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1076.101698] env[62522]: DEBUG oslo_vmware.rw_handles [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1076.101698] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52834aad-47cf-7feb-1d23-99a46c058dca" [ 1076.101698] env[62522]: _type = "HttpNfcLease" [ 1076.101698] env[62522]: }. 
{{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1076.102619] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a44be2a0-b9b8-440c-be4c-67f0e5098c28 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.116205] env[62522]: DEBUG oslo_vmware.rw_handles [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c73dcf-3395-fbe8-60ba-d0b6e7aa0642/disk-0.vmdk from lease info. {{(pid=62522) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1076.116205] env[62522]: DEBUG oslo_vmware.rw_handles [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c73dcf-3395-fbe8-60ba-d0b6e7aa0642/disk-0.vmdk for reading. {{(pid=62522) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1076.190055] env[62522]: INFO nova.compute.manager [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Shelve offloading [ 1076.240712] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6cfcd53a-05d4-49da-85c3-8aafc73b1ca7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.311436] env[62522]: DEBUG nova.compute.manager [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1076.351628] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1076.453715] env[62522]: DEBUG nova.compute.manager [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1076.454319] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1076.455301] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd8ec04-2a15-476f-a29b-fc6febcee27a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.470038] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1076.475507] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-748ab8e3-b6ee-4f6d-9449-6af168729e56 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.491395] env[62522]: DEBUG oslo_vmware.api [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1076.491395] env[62522]: value = "task-2416096" [ 1076.491395] env[62522]: _type = "Task" [ 1076.491395] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.503401] env[62522]: DEBUG oslo_vmware.api [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416096, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.537364] env[62522]: DEBUG oslo_vmware.api [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2416094, 'name': PowerOffVM_Task, 'duration_secs': 0.240689} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.537798] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1076.538106] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1076.539033] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-648fc570-2fa4-45b8-874b-0d98bd5991a7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.580930] env[62522]: DEBUG oslo_vmware.api [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Task: {'id': task-2416095, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.088955} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.580930] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1076.580930] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1076.580930] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1076.580930] env[62522]: INFO nova.compute.manager [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Took 2.35 seconds to destroy the instance on the hypervisor. [ 1076.580930] env[62522]: DEBUG oslo.service.loopingcall [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1076.582864] env[62522]: DEBUG nova.compute.manager [-] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1076.582864] env[62522]: DEBUG nova.network.neutron [-] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1076.629169] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1076.629512] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1076.629694] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Deleting the datastore file [datastore1] bf44e269-0297-473e-b6ce-04a40d0ec1b4 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1076.630341] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-723646a3-7de3-4104-a7ec-f20d8df1c511 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.641958] env[62522]: DEBUG oslo_vmware.api [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 1076.641958] env[62522]: value = "task-2416098" [ 1076.641958] env[62522]: _type = "Task" [ 1076.641958] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.658545] env[62522]: DEBUG oslo_vmware.api [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2416098, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.668258] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd702b1-f1f4-4ffe-8a56-fb10ffb764f0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.677932] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37182f04-4206-41bb-9be3-c6abc533c122 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.719424] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1076.721451] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4705b8f2-a766-4266-814f-e8648f5d5b57 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.722519] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60bf4cec-dd19-467a-9b8f-e7fb53560205 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.733654] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e033cd8b-e168-4bd2-9fee-8c2e4525f160 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.739475] env[62522]: DEBUG oslo_vmware.api [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1076.739475] env[62522]: value = "task-2416099" [ 1076.739475] env[62522]: _type = "Task" [ 1076.739475] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.753641] env[62522]: DEBUG nova.compute.provider_tree [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1076.761883] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] VM already powered off {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1076.761883] env[62522]: DEBUG nova.compute.manager [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1076.761883] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c150c12-2389-4523-a668-48a56acaa177 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.768270] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "refresh_cache-c28d2907-5b59-4df8-91a8-4ba0f2047d89" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1076.768557] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquired lock "refresh_cache-c28d2907-5b59-4df8-91a8-4ba0f2047d89" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.768809] env[62522]: DEBUG nova.network.neutron [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1076.990532] env[62522]: DEBUG nova.network.neutron [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Successfully created port: 319c60b7-98f1-4cf2-8a9a-d2c1009599bf {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1077.005550] env[62522]: DEBUG oslo_vmware.api [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416096, 'name': PowerOffVM_Task, 'duration_secs': 0.237703} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.005984] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1077.006302] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1077.006627] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fe1779a9-0385-425d-b9f0-cd2fc68b6a69 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.083440] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1077.083847] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1077.083847] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Deleting the datastore file [datastore1] 522e778b-6e01-4554-a3eb-dd1efa7870de {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1077.084119] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9c37c332-d5d6-4c24-ae36-53ede5b9d640 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.093054] env[62522]: DEBUG oslo_vmware.api [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1077.093054] env[62522]: value = "task-2416101" [ 1077.093054] env[62522]: _type = "Task" [ 1077.093054] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.102102] env[62522]: DEBUG oslo_vmware.api [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416101, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.153538] env[62522]: DEBUG oslo_vmware.api [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2416098, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165914} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.154649] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1077.154649] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1077.154649] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1077.154649] env[62522]: INFO nova.compute.manager [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1077.154971] env[62522]: DEBUG oslo.service.loopingcall [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1077.154971] env[62522]: DEBUG nova.compute.manager [-] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1077.155040] env[62522]: DEBUG nova.network.neutron [-] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1077.260689] env[62522]: DEBUG nova.scheduler.client.report [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1077.322579] env[62522]: DEBUG nova.compute.manager [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1077.356781] env[62522]: DEBUG nova.virt.hardware [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1077.356781] env[62522]: DEBUG nova.virt.hardware [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1077.357713] env[62522]: DEBUG nova.virt.hardware [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1077.357713] env[62522]: DEBUG nova.virt.hardware [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1077.357870] env[62522]: DEBUG nova.virt.hardware [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1077.358079] env[62522]: DEBUG nova.virt.hardware [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1077.358338] env[62522]: DEBUG nova.virt.hardware [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1077.358693] env[62522]: DEBUG nova.virt.hardware [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1077.358949] env[62522]: DEBUG 
nova.virt.hardware [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1077.359193] env[62522]: DEBUG nova.virt.hardware [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1077.359418] env[62522]: DEBUG nova.virt.hardware [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1077.360918] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3603219e-545c-40c3-a8f1-91baa3b574a8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.370260] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679c835f-77c6-44eb-84e3-17132040ea77 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.605743] env[62522]: DEBUG oslo_vmware.api [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416101, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169478} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.606154] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1077.606565] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1077.606783] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1077.607126] env[62522]: INFO nova.compute.manager [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1077.607449] env[62522]: DEBUG oslo.service.loopingcall [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1077.607659] env[62522]: DEBUG nova.compute.manager [-] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1077.607898] env[62522]: DEBUG nova.network.neutron [-] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1077.766596] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.473s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1077.767436] env[62522]: DEBUG nova.compute.manager [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1077.770847] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.419s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1077.771039] env[62522]: DEBUG nova.objects.instance [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lazy-loading 'resources' on Instance uuid fcd0eef6-d059-4495-a982-058b6c9626d1 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1078.180668] env[62522]: DEBUG nova.compute.manager [req-5fb4cce8-7a2f-4b6d-84c6-549950557fb2 req-f9f1ea03-8bdd-4734-b92b-628ceb3eb55c service nova] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Received event network-vif-deleted-59c9ae48-dc88-4de9-ba91-f62a004a177c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1078.180875] env[62522]: INFO nova.compute.manager [req-5fb4cce8-7a2f-4b6d-84c6-549950557fb2 req-f9f1ea03-8bdd-4734-b92b-628ceb3eb55c service nova] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Neutron deleted interface 59c9ae48-dc88-4de9-ba91-f62a004a177c; detaching it from the instance and deleting it from the info cache [ 1078.181082] env[62522]: DEBUG nova.network.neutron [req-5fb4cce8-7a2f-4b6d-84c6-549950557fb2 req-f9f1ea03-8bdd-4734-b92b-628ceb3eb55c service nova] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.274695] env[62522]: DEBUG nova.compute.utils [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1078.284610] env[62522]: DEBUG nova.compute.manager [None 
req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1078.284610] env[62522]: DEBUG nova.network.neutron [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1078.367205] env[62522]: DEBUG nova.policy [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '90b507a3fb754b77beba20834616fc9d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aebe6f27d0ef4b5f8106bec8e21fa089', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1078.476437] env[62522]: DEBUG nova.network.neutron [-] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.521443] env[62522]: DEBUG nova.network.neutron [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Updating instance_info_cache with network_info: [{"id": "931dfe44-9ac3-4df4-a4ea-6c8612389451", "address": "fa:16:3e:f5:6d:32", "network": {"id": "949f3536-8a7e-4edf-b6cc-6a264fe5fe83", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1891232839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f93394feaa4f4b61a5d3d670d32ec599", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap931dfe44-9a", "ovs_interfaceid": "931dfe44-9ac3-4df4-a4ea-6c8612389451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.546682] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c30a7d-78bd-4f10-b8c6-b3baf8ddcef1 {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.558516] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-442bbdc7-3bb8-45e0-a824-ee0812d4a5f2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.596763] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17e3a04f-0666-4530-a2c2-defff76b4e62 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.606987] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c88768da-a63a-431e-aa4f-9357b5dd79e4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.622443] env[62522]: DEBUG nova.compute.provider_tree [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1078.687852] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-944a4dc0-8113-4b45-b933-bddd45abfb97 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.701683] env[62522]: DEBUG nova.network.neutron [-] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.705666] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a08a6c7-5626-4db5-b687-92e82599b7cb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.728404] env[62522]: INFO nova.compute.manager [-] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Took 1.57 seconds to deallocate network for instance. [ 1078.753544] env[62522]: DEBUG nova.compute.manager [req-5fb4cce8-7a2f-4b6d-84c6-549950557fb2 req-f9f1ea03-8bdd-4734-b92b-628ceb3eb55c service nova] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Detach interface failed, port_id=59c9ae48-dc88-4de9-ba91-f62a004a177c, reason: Instance 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1078.782898] env[62522]: DEBUG nova.compute.manager [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1078.801913] env[62522]: DEBUG nova.network.neutron [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Successfully created port: 3a727503-8134-40ef-91bb-d4d7be25f408 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1078.892315] env[62522]: DEBUG nova.compute.manager [req-8b7eedaa-acc2-478e-8d7d-f8441af4ccaf req-520405f8-a657-4e2d-86e9-e3652ded92d0 service nova] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Received event network-vif-deleted-b258477a-b39a-4d17-975a-087d4d6d41bd {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1078.892478] env[62522]: INFO nova.compute.manager [req-8b7eedaa-acc2-478e-8d7d-f8441af4ccaf req-520405f8-a657-4e2d-86e9-e3652ded92d0 service nova] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Neutron deleted interface b258477a-b39a-4d17-975a-087d4d6d41bd; detaching it from the instance and deleting it from the info cache [ 1078.892810] env[62522]: DEBUG nova.network.neutron [req-8b7eedaa-acc2-478e-8d7d-f8441af4ccaf req-520405f8-a657-4e2d-86e9-e3652ded92d0 service nova] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.979215] env[62522]: INFO nova.compute.manager [-] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Took 2.40 seconds to deallocate network for instance. [ 1079.025270] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Releasing lock "refresh_cache-c28d2907-5b59-4df8-91a8-4ba0f2047d89" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1079.126259] env[62522]: DEBUG nova.scheduler.client.report [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1079.237416] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1079.353692] env[62522]: DEBUG nova.network.neutron [-] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.397912] env[62522]: DEBUG oslo_vmware.service [-] Invoking 
SearchIndex.FindAllByUuid with opID=oslo.vmware-195bac50-8002-4719-bf7f-c9854a2b9456 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.413791] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-055cf7b4-c4b4-419f-9189-ff2dc8bd66d1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.452476] env[62522]: DEBUG nova.compute.manager [req-8b7eedaa-acc2-478e-8d7d-f8441af4ccaf req-520405f8-a657-4e2d-86e9-e3652ded92d0 service nova] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Detach interface failed, port_id=b258477a-b39a-4d17-975a-087d4d6d41bd, reason: Instance 522e778b-6e01-4554-a3eb-dd1efa7870de could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1079.554702] env[62522]: INFO nova.compute.manager [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Took 0.57 seconds to detach 1 volumes for instance. [ 1079.560593] env[62522]: DEBUG nova.compute.manager [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Deleting volume: 5a72a163-f70c-478d-aff3-2a748c2d25d5 {{(pid=62522) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1079.635835] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.865s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.642851] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.404s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1079.642851] env[62522]: DEBUG nova.objects.instance [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lazy-loading 'resources' on Instance uuid bf44e269-0297-473e-b6ce-04a40d0ec1b4 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1079.671424] env[62522]: INFO nova.scheduler.client.report [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Deleted allocations for instance fcd0eef6-d059-4495-a982-058b6c9626d1 [ 1079.800029] env[62522]: DEBUG nova.compute.manager [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1079.828341] env[62522]: DEBUG nova.virt.hardware [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1079.828600] env[62522]: DEBUG nova.virt.hardware [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1079.828756] env[62522]: DEBUG nova.virt.hardware [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1079.828937] env[62522]: DEBUG nova.virt.hardware [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1079.829168] env[62522]: DEBUG nova.virt.hardware [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1079.829337] env[62522]: DEBUG nova.virt.hardware [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1079.829549] env[62522]: DEBUG nova.virt.hardware [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1079.829707] env[62522]: DEBUG nova.virt.hardware [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1079.829972] env[62522]: DEBUG 
nova.virt.hardware [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1079.830170] env[62522]: DEBUG nova.virt.hardware [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1079.830345] env[62522]: DEBUG nova.virt.hardware [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1079.831224] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b150e461-3bc6-421c-8cd5-e52ab3e0ed20 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.840503] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c09295e3-a32c-4090-b511-0dfaa661d8e3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.856457] env[62522]: INFO nova.compute.manager [-] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Took 2.25 seconds to deallocate network for instance. [ 1079.911332] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1079.912305] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eda6ebc-55c7-4b60-821b-a725166e0d95 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.921329] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1079.921660] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b1228924-b8fe-4f9f-b08f-5d0a112e5aeb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.998478] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1079.998750] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: 
c28d2907-5b59-4df8-91a8-4ba0f2047d89] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1079.998967] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Deleting the datastore file [datastore1] c28d2907-5b59-4df8-91a8-4ba0f2047d89 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1079.999614] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d36c4d80-99aa-4336-b6fc-b0fa8070fc5e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.008891] env[62522]: DEBUG oslo_vmware.api [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1080.008891] env[62522]: value = "task-2416107" [ 1080.008891] env[62522]: _type = "Task" [ 1080.008891] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.020549] env[62522]: DEBUG oslo_vmware.api [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416107, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.113248] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.193053] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3ebbec8a-9801-4a56-982e-994e8e96e7f2 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "fcd0eef6-d059-4495-a982-058b6c9626d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.141s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1080.193053] env[62522]: DEBUG oslo_concurrency.lockutils [req-e6fb195c-14d7-4917-909c-8bf7730eb8d6 req-96b22492-0e44-4e96-9db0-d2c500da2b5f service nova] Acquired lock "fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.193053] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a669b55-3cba-4a2c-812c-9326c65d3eea {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.202932] env[62522]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
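Aside (illustrative sketch, not part of the captured log): the warning above and the fault records that follow show vCenter raising a ManagedObjectNotFound fault for a moref (vm-489754) whose VM has already been deleted; oslo.vmware rethrows this as ManagedObjectNotFoundException, and Nova ultimately reports the instance as not found. The sketch below shows that lookup-and-translate pattern using only the symbols named in the traceback (oslo_vmware.vim_util get_object_property, oslo_vmware.exceptions.ManagedObjectNotFoundException, VMwareAPISession.invoke_api). The connection parameters, the concrete moref value, and the helper name vm_is_gone are assumptions made for the example, not values taken from this deployment.

# Hedged example: detecting an already-deleted VM via oslo.vmware.
# Assumed values are marked in comments; the API calls mirror the traceback above.
from oslo_vmware import api as vmware_api
from oslo_vmware import exceptions as vexc
from oslo_vmware import vim_util


def vm_is_gone(session, vm_moref):
    """Return True if the backing VM no longer exists on the vCenter side.

    Reading any property of a stale moref makes vCenter raise a
    ManagedObjectNotFound fault, which oslo.vmware surfaces as
    ManagedObjectNotFoundException; callers then treat the instance as gone,
    much like the "could not be found" handling in the log records above.
    """
    try:
        # Same call chain as in the traceback:
        # invoke_api -> vim_util.get_object_property -> RetrievePropertiesEx
        session.invoke_api(vim_util, 'get_object_property',
                           session.vim, vm_moref, 'runtime.powerState')
        return False
    except vexc.ManagedObjectNotFoundException:
        # The moref (e.g. vm-489754 in the log) has already been deleted.
        return True


if __name__ == '__main__':
    # Assumed connection details; a real deployment reads these from the
    # [vmware] section of nova.conf.
    session = vmware_api.VMwareAPISession(
        'vc1.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)
    moref = vim_util.get_moref('vm-489754', 'VirtualMachine')  # assumed moref
    print(vm_is_gone(session, moref))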
[ 1080.202932] env[62522]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=62522) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1080.204025] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ba8462d1-ef09-4410-970e-ceab7400317b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.223171] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45c5e2cb-6e08-4b7d-a4b9-86585bd8d9ad {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.243326] env[62522]: DEBUG nova.compute.manager [req-cb2a133b-156b-4c39-8ca3-94e977ebf8db req-0cdd58d7-be48-48ce-965c-64bca49c3e08 service nova] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Received event network-vif-deleted-36fe2fd3-3447-4032-8c02-5be9712b769d {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1080.273201] env[62522]: ERROR root [req-e6fb195c-14d7-4917-909c-8bf7730eb8d6 req-96b22492-0e44-4e96-9db0-d2c500da2b5f service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-489754' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 480, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-489754' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-489754' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-489754'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, 
self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-489754' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-489754' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-489754'}\n"]: nova.exception.InstanceNotFound: Instance fcd0eef6-d059-4495-a982-058b6c9626d1 could not be found. [ 1080.273201] env[62522]: DEBUG oslo_concurrency.lockutils [req-e6fb195c-14d7-4917-909c-8bf7730eb8d6 req-96b22492-0e44-4e96-9db0-d2c500da2b5f service nova] Releasing lock "fcd0eef6-d059-4495-a982-058b6c9626d1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1080.273405] env[62522]: DEBUG nova.compute.manager [req-e6fb195c-14d7-4917-909c-8bf7730eb8d6 req-96b22492-0e44-4e96-9db0-d2c500da2b5f service nova] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Detach interface failed, port_id=fa421858-7ef8-4e24-94ec-cb1477a79f22, reason: Instance fcd0eef6-d059-4495-a982-058b6c9626d1 could not be found. 
{{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1080.273549] env[62522]: DEBUG nova.compute.manager [req-e6fb195c-14d7-4917-909c-8bf7730eb8d6 req-96b22492-0e44-4e96-9db0-d2c500da2b5f service nova] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Received event network-vif-deleted-9e10cc19-76da-49d9-80b6-068ce128a1b0 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1080.273715] env[62522]: INFO nova.compute.manager [req-e6fb195c-14d7-4917-909c-8bf7730eb8d6 req-96b22492-0e44-4e96-9db0-d2c500da2b5f service nova] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Neutron deleted interface 9e10cc19-76da-49d9-80b6-068ce128a1b0; detaching it from the instance and deleting it from the info cache [ 1080.273889] env[62522]: DEBUG nova.network.neutron [req-e6fb195c-14d7-4917-909c-8bf7730eb8d6 req-96b22492-0e44-4e96-9db0-d2c500da2b5f service nova] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.326210] env[62522]: DEBUG nova.network.neutron [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Successfully updated port: 319c60b7-98f1-4cf2-8a9a-d2c1009599bf {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1080.362806] env[62522]: DEBUG oslo_concurrency.lockutils [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.377727] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab475b3-ac95-4dec-b2d7-d86466444702 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.387604] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79db0f89-fcf5-42b7-af43-dc143c555f69 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.419301] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68f5dde2-a879-41af-948d-d42869c84592 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.425090] env[62522]: DEBUG nova.network.neutron [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Successfully updated port: 3a727503-8134-40ef-91bb-d4d7be25f408 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1080.429567] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c10716-8ca8-4c48-8fca-f4f2425e53ff {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.444565] env[62522]: DEBUG nova.compute.provider_tree [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 
tempest-ServerRescueNegativeTestJSON-454526832-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1080.521015] env[62522]: DEBUG oslo_vmware.api [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416107, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174096} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.521676] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1080.522515] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1080.522601] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1080.544964] env[62522]: INFO nova.scheduler.client.report [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Deleted allocations for instance c28d2907-5b59-4df8-91a8-4ba0f2047d89 [ 1080.777588] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-43bfa0fc-9694-4453-908d-24421484fb3f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.788236] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5e4cd5a-25e6-430b-bb52-d24bd115fabf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.819525] env[62522]: DEBUG nova.compute.manager [req-e6fb195c-14d7-4917-909c-8bf7730eb8d6 req-96b22492-0e44-4e96-9db0-d2c500da2b5f service nova] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Detach interface failed, port_id=9e10cc19-76da-49d9-80b6-068ce128a1b0, reason: Instance cabe40a0-8bd0-4d77-b949-298bd194fa42 could not be found. 
{{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1080.831883] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "refresh_cache-9337449d-5aff-4170-83ea-42fe2e9d1657" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1080.834045] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired lock "refresh_cache-9337449d-5aff-4170-83ea-42fe2e9d1657" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.834045] env[62522]: DEBUG nova.network.neutron [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1080.932601] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Acquiring lock "refresh_cache-783d9ae7-67f5-4c54-81a7-6715b762afb3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1080.932601] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Acquired lock "refresh_cache-783d9ae7-67f5-4c54-81a7-6715b762afb3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.932601] env[62522]: DEBUG nova.network.neutron [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1080.947686] env[62522]: DEBUG nova.scheduler.client.report [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1080.962405] env[62522]: DEBUG nova.compute.manager [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Received event network-vif-unplugged-931dfe44-9ac3-4df4-a4ea-6c8612389451 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1080.962638] env[62522]: DEBUG oslo_concurrency.lockutils 
[req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] Acquiring lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.962851] env[62522]: DEBUG oslo_concurrency.lockutils [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] Lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.963242] env[62522]: DEBUG oslo_concurrency.lockutils [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] Lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1080.963442] env[62522]: DEBUG nova.compute.manager [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] No waiting events found dispatching network-vif-unplugged-931dfe44-9ac3-4df4-a4ea-6c8612389451 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1080.963634] env[62522]: WARNING nova.compute.manager [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Received unexpected event network-vif-unplugged-931dfe44-9ac3-4df4-a4ea-6c8612389451 for instance with vm_state shelved_offloaded and task_state None. [ 1080.963792] env[62522]: DEBUG nova.compute.manager [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Received event network-changed-931dfe44-9ac3-4df4-a4ea-6c8612389451 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1080.963922] env[62522]: DEBUG nova.compute.manager [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Refreshing instance network info cache due to event network-changed-931dfe44-9ac3-4df4-a4ea-6c8612389451. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1080.964133] env[62522]: DEBUG oslo_concurrency.lockutils [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] Acquiring lock "refresh_cache-c28d2907-5b59-4df8-91a8-4ba0f2047d89" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1080.964336] env[62522]: DEBUG oslo_concurrency.lockutils [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] Acquired lock "refresh_cache-c28d2907-5b59-4df8-91a8-4ba0f2047d89" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.964423] env[62522]: DEBUG nova.network.neutron [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Refreshing network info cache for port 931dfe44-9ac3-4df4-a4ea-6c8612389451 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1081.051121] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.383380] env[62522]: DEBUG nova.network.neutron [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1081.443641] env[62522]: DEBUG oslo_concurrency.lockutils [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "e369d9e1-1345-4038-b5f3-f816fe767a72" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.443763] env[62522]: DEBUG oslo_concurrency.lockutils [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "e369d9e1-1345-4038-b5f3-f816fe767a72" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.452984] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.812s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.455821] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.343s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.455821] env[62522]: DEBUG nova.objects.instance [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Lazy-loading 'resources' on Instance uuid 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1081.566203] env[62522]: INFO nova.scheduler.client.report [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Deleted allocations for instance bf44e269-0297-473e-b6ce-04a40d0ec1b4 [ 1081.566203] env[62522]: DEBUG nova.network.neutron [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1081.662649] env[62522]: DEBUG nova.network.neutron [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Updating instance_info_cache with network_info: [{"id": "319c60b7-98f1-4cf2-8a9a-d2c1009599bf", "address": "fa:16:3e:5e:84:40", "network": {"id": "70e81afa-0eda-49c5-b072-e79b3c287468", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1715169983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff5da278d2be4ca983424c8291beadec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap319c60b7-98", "ovs_interfaceid": "319c60b7-98f1-4cf2-8a9a-d2c1009599bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.774335] env[62522]: DEBUG oslo_concurrency.lockutils [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquiring lock "04a9d357-d094-487b-8f09-2f7e0c35f0d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.774628] env[62522]: DEBUG oslo_concurrency.lockutils [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "04a9d357-d094-487b-8f09-2f7e0c35f0d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.774869] env[62522]: DEBUG oslo_concurrency.lockutils [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquiring lock "04a9d357-d094-487b-8f09-2f7e0c35f0d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.775084] env[62522]: DEBUG oslo_concurrency.lockutils [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "04a9d357-d094-487b-8f09-2f7e0c35f0d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.775287] env[62522]: DEBUG 
oslo_concurrency.lockutils [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "04a9d357-d094-487b-8f09-2f7e0c35f0d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.779477] env[62522]: INFO nova.compute.manager [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Terminating instance [ 1081.946700] env[62522]: DEBUG nova.compute.manager [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1082.049718] env[62522]: DEBUG nova.network.neutron [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Updating instance_info_cache with network_info: [{"id": "3a727503-8134-40ef-91bb-d4d7be25f408", "address": "fa:16:3e:c6:fd:b2", "network": {"id": "217ced48-9485-4371-a119-08bf442b9bf5", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-471466315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aebe6f27d0ef4b5f8106bec8e21fa089", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a727503-81", "ovs_interfaceid": "3a727503-8134-40ef-91bb-d4d7be25f408", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.083075] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0154ff05-5129-431d-9d0f-802d0cd69003 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "bf44e269-0297-473e-b6ce-04a40d0ec1b4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.596s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.122065] env[62522]: DEBUG nova.network.neutron [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Updated VIF entry in instance network info cache for port 931dfe44-9ac3-4df4-a4ea-6c8612389451. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1082.122528] env[62522]: DEBUG nova.network.neutron [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Updating instance_info_cache with network_info: [{"id": "931dfe44-9ac3-4df4-a4ea-6c8612389451", "address": "fa:16:3e:f5:6d:32", "network": {"id": "949f3536-8a7e-4edf-b6cc-6a264fe5fe83", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1891232839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f93394feaa4f4b61a5d3d670d32ec599", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap931dfe44-9a", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.166775] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Releasing lock "refresh_cache-9337449d-5aff-4170-83ea-42fe2e9d1657" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1082.167225] env[62522]: DEBUG nova.compute.manager [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Instance network_info: |[{"id": "319c60b7-98f1-4cf2-8a9a-d2c1009599bf", "address": "fa:16:3e:5e:84:40", "network": {"id": "70e81afa-0eda-49c5-b072-e79b3c287468", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1715169983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff5da278d2be4ca983424c8291beadec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap319c60b7-98", "ovs_interfaceid": "319c60b7-98f1-4cf2-8a9a-d2c1009599bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1082.168255] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 
tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:84:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7654928b-7afe-42e3-a18d-68ecc775cefe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '319c60b7-98f1-4cf2-8a9a-d2c1009599bf', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1082.177090] env[62522]: DEBUG oslo.service.loopingcall [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1082.180299] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1082.181262] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e36d2ff0-d40b-4711-97e8-b386fbc9f4cf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.209479] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1082.209479] env[62522]: value = "task-2416108" [ 1082.209479] env[62522]: _type = "Task" [ 1082.209479] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.226579] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416108, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.228855] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04aa1a58-e58a-4027-a22d-cf37678d29ed {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.242324] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f679bed7-aa21-4735-9090-698247a96d5b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.277012] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb07369-74e8-49a6-a274-d28c55a1c608 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.282233] env[62522]: DEBUG nova.compute.manager [req-e484799f-bdce-46c6-a8d5-e4d630a13e7e req-cd3d1879-73bb-4a35-a873-51003ac1fb3d service nova] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Received event network-vif-plugged-3a727503-8134-40ef-91bb-d4d7be25f408 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1082.282470] env[62522]: DEBUG oslo_concurrency.lockutils [req-e484799f-bdce-46c6-a8d5-e4d630a13e7e req-cd3d1879-73bb-4a35-a873-51003ac1fb3d service nova] Acquiring lock "783d9ae7-67f5-4c54-81a7-6715b762afb3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.282673] env[62522]: DEBUG oslo_concurrency.lockutils [req-e484799f-bdce-46c6-a8d5-e4d630a13e7e req-cd3d1879-73bb-4a35-a873-51003ac1fb3d service nova] Lock "783d9ae7-67f5-4c54-81a7-6715b762afb3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.282836] env[62522]: DEBUG oslo_concurrency.lockutils [req-e484799f-bdce-46c6-a8d5-e4d630a13e7e req-cd3d1879-73bb-4a35-a873-51003ac1fb3d service nova] Lock "783d9ae7-67f5-4c54-81a7-6715b762afb3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.283037] env[62522]: DEBUG nova.compute.manager [req-e484799f-bdce-46c6-a8d5-e4d630a13e7e req-cd3d1879-73bb-4a35-a873-51003ac1fb3d service nova] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] No waiting events found dispatching network-vif-plugged-3a727503-8134-40ef-91bb-d4d7be25f408 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1082.283208] env[62522]: WARNING nova.compute.manager [req-e484799f-bdce-46c6-a8d5-e4d630a13e7e req-cd3d1879-73bb-4a35-a873-51003ac1fb3d service nova] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Received unexpected event network-vif-plugged-3a727503-8134-40ef-91bb-d4d7be25f408 for instance with vm_state building and task_state spawning. 
[ 1082.283371] env[62522]: DEBUG nova.compute.manager [req-e484799f-bdce-46c6-a8d5-e4d630a13e7e req-cd3d1879-73bb-4a35-a873-51003ac1fb3d service nova] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Received event network-changed-3a727503-8134-40ef-91bb-d4d7be25f408 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1082.283523] env[62522]: DEBUG nova.compute.manager [req-e484799f-bdce-46c6-a8d5-e4d630a13e7e req-cd3d1879-73bb-4a35-a873-51003ac1fb3d service nova] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Refreshing instance network info cache due to event network-changed-3a727503-8134-40ef-91bb-d4d7be25f408. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1082.283687] env[62522]: DEBUG oslo_concurrency.lockutils [req-e484799f-bdce-46c6-a8d5-e4d630a13e7e req-cd3d1879-73bb-4a35-a873-51003ac1fb3d service nova] Acquiring lock "refresh_cache-783d9ae7-67f5-4c54-81a7-6715b762afb3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1082.291191] env[62522]: DEBUG nova.compute.manager [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1082.291431] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1082.292306] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdbf3801-11e2-4254-82b2-047b7ddd9ccf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.295954] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e28b552-1f24-4ac7-b98d-1ca62c0ec686 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.313558] env[62522]: DEBUG nova.compute.provider_tree [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1082.320270] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1082.320270] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5995bcf5-9d2e-427b-af97-929b104584fc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.326779] env[62522]: DEBUG oslo_vmware.api [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 
tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 1082.326779] env[62522]: value = "task-2416110" [ 1082.326779] env[62522]: _type = "Task" [ 1082.326779] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.337815] env[62522]: DEBUG oslo_vmware.api [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416110, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.469833] env[62522]: DEBUG oslo_concurrency.lockutils [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.554024] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Releasing lock "refresh_cache-783d9ae7-67f5-4c54-81a7-6715b762afb3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1082.554024] env[62522]: DEBUG nova.compute.manager [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Instance network_info: |[{"id": "3a727503-8134-40ef-91bb-d4d7be25f408", "address": "fa:16:3e:c6:fd:b2", "network": {"id": "217ced48-9485-4371-a119-08bf442b9bf5", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-471466315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aebe6f27d0ef4b5f8106bec8e21fa089", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a727503-81", "ovs_interfaceid": "3a727503-8134-40ef-91bb-d4d7be25f408", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1082.554319] env[62522]: DEBUG oslo_concurrency.lockutils [req-e484799f-bdce-46c6-a8d5-e4d630a13e7e req-cd3d1879-73bb-4a35-a873-51003ac1fb3d service nova] Acquired lock "refresh_cache-783d9ae7-67f5-4c54-81a7-6715b762afb3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.554438] env[62522]: DEBUG nova.network.neutron [req-e484799f-bdce-46c6-a8d5-e4d630a13e7e req-cd3d1879-73bb-4a35-a873-51003ac1fb3d service nova] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] 
Refreshing network info cache for port 3a727503-8134-40ef-91bb-d4d7be25f408 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1082.557163] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:fd:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abe48956-848a-4e1f-b1f1-a27baa5390b9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3a727503-8134-40ef-91bb-d4d7be25f408', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1082.567508] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Creating folder: Project (aebe6f27d0ef4b5f8106bec8e21fa089). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1082.568737] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b2270de-2f91-4983-81fe-bde9737f1bbf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.583447] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Created folder: Project (aebe6f27d0ef4b5f8106bec8e21fa089) in parent group-v489562. [ 1082.583640] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Creating folder: Instances. Parent ref: group-v489814. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1082.584440] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3328efe6-87ea-4c8a-a1aa-76a9ca6c7090 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.602216] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Created folder: Instances in parent group-v489814. [ 1082.602604] env[62522]: DEBUG oslo.service.loopingcall [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1082.603357] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1082.603357] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-78273d6f-aa11-424c-a9ab-b7d5fdaf65c6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.628158] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1082.628158] env[62522]: value = "task-2416113" [ 1082.628158] env[62522]: _type = "Task" [ 1082.628158] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.634721] env[62522]: DEBUG oslo_concurrency.lockutils [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] Releasing lock "refresh_cache-c28d2907-5b59-4df8-91a8-4ba0f2047d89" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1082.635036] env[62522]: DEBUG nova.compute.manager [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Received event network-vif-plugged-319c60b7-98f1-4cf2-8a9a-d2c1009599bf {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1082.635196] env[62522]: DEBUG oslo_concurrency.lockutils [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] Acquiring lock "9337449d-5aff-4170-83ea-42fe2e9d1657-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.635449] env[62522]: DEBUG oslo_concurrency.lockutils [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] Lock "9337449d-5aff-4170-83ea-42fe2e9d1657-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.635741] env[62522]: DEBUG oslo_concurrency.lockutils [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] Lock "9337449d-5aff-4170-83ea-42fe2e9d1657-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.635821] env[62522]: DEBUG nova.compute.manager [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] No waiting events found dispatching network-vif-plugged-319c60b7-98f1-4cf2-8a9a-d2c1009599bf {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1082.635962] env[62522]: WARNING nova.compute.manager [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Received unexpected event network-vif-plugged-319c60b7-98f1-4cf2-8a9a-d2c1009599bf for instance with vm_state building and task_state spawning. 
[ 1082.636154] env[62522]: DEBUG nova.compute.manager [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Received event network-changed-319c60b7-98f1-4cf2-8a9a-d2c1009599bf {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1082.636302] env[62522]: DEBUG nova.compute.manager [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Refreshing instance network info cache due to event network-changed-319c60b7-98f1-4cf2-8a9a-d2c1009599bf. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1082.636492] env[62522]: DEBUG oslo_concurrency.lockutils [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] Acquiring lock "refresh_cache-9337449d-5aff-4170-83ea-42fe2e9d1657" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1082.636626] env[62522]: DEBUG oslo_concurrency.lockutils [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] Acquired lock "refresh_cache-9337449d-5aff-4170-83ea-42fe2e9d1657" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.636783] env[62522]: DEBUG nova.network.neutron [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Refreshing network info cache for port 319c60b7-98f1-4cf2-8a9a-d2c1009599bf {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1082.641461] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.641641] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416113, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.722018] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416108, 'name': CreateVM_Task, 'duration_secs': 0.497762} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.722400] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1082.723048] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1082.723363] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.723599] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1082.723944] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd912ea8-eb26-44cf-98f9-86a2f90429c7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.730513] env[62522]: DEBUG oslo_vmware.api [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1082.730513] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a8a1bd-fe9f-c665-e541-9b8a4e3dc292" [ 1082.730513] env[62522]: _type = "Task" [ 1082.730513] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.739900] env[62522]: DEBUG oslo_vmware.api [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a8a1bd-fe9f-c665-e541-9b8a4e3dc292, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.818989] env[62522]: DEBUG nova.scheduler.client.report [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1082.837326] env[62522]: DEBUG oslo_vmware.api [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416110, 'name': PowerOffVM_Task, 'duration_secs': 0.314033} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.837656] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1082.837797] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1082.838069] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f3d4dc7a-3b97-48fd-baf1-a5e5dca90cd4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.914902] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1082.915199] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1082.915498] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Deleting the datastore file [datastore2] 04a9d357-d094-487b-8f09-2f7e0c35f0d7 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1082.915834] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c74e048-cb86-4227-af82-60a55ccc4053 {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.930031] env[62522]: DEBUG oslo_vmware.api [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for the task: (returnval){ [ 1082.930031] env[62522]: value = "task-2416115" [ 1082.930031] env[62522]: _type = "Task" [ 1082.930031] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.938263] env[62522]: DEBUG oslo_vmware.api [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416115, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.141496] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416113, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.243944] env[62522]: DEBUG oslo_vmware.api [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a8a1bd-fe9f-c665-e541-9b8a4e3dc292, 'name': SearchDatastore_Task, 'duration_secs': 0.032698} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.244760] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1083.244760] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1083.244760] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1083.244999] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.244999] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Creating directory with 
path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1083.245287] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0013d6c4-0bdf-4a06-bff7-0093fb04a14e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.254666] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1083.254856] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1083.257734] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ccb1657-283d-4055-8858-65f3f50f9a4c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.264438] env[62522]: DEBUG oslo_vmware.api [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1083.264438] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52aee90e-cd43-f2e6-3cdb-e6abc4368b89" [ 1083.264438] env[62522]: _type = "Task" [ 1083.264438] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.272639] env[62522]: DEBUG oslo_vmware.api [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52aee90e-cd43-f2e6-3cdb-e6abc4368b89, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.324658] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.869s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1083.326881] env[62522]: DEBUG oslo_concurrency.lockutils [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.964s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.327133] env[62522]: DEBUG nova.objects.instance [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lazy-loading 'resources' on Instance uuid 522e778b-6e01-4554-a3eb-dd1efa7870de {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1083.347843] env[62522]: INFO nova.scheduler.client.report [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Deleted allocations for instance 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6 [ 1083.437968] env[62522]: DEBUG oslo_vmware.api [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Task: {'id': task-2416115, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.231902} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.438111] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1083.438455] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1083.438638] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1083.438814] env[62522]: INFO nova.compute.manager [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Took 1.15 seconds to destroy the instance on the hypervisor. 
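The PowerOffVM_Task / UnregisterVM / DeleteDatastoreFile_Task sequence recorded above is driven through oslo.vmware's session API: invoke_api() returns a task moref and wait_for_task() polls it, which is what produces the repeated "Waiting for the task" and "progress is N%" entries. Below is a minimal sketch of that call pattern; the vCenter host, credentials, and the vm_ref/dc_ref/datastore-path lookups are placeholders rather than values taken from this log.

    from oslo_vmware import api as vmware_api

    # Placeholder connection details; in Nova these come from nova.conf [vmware].
    session = vmware_api.VMwareAPISession(
        'vcenter.example.test', 'user', 'secret',
        api_retry_count=10,
        task_poll_interval=0.5)  # seconds between the "progress is N%" polls

    def destroy_vm(vm_ref, dc_ref, ds_path):
        """Mirror the power-off / unregister / delete-files sequence above.

        vm_ref, dc_ref and ds_path (e.g. "[datastore2] 04a9d357-...") are
        assumed to have been resolved by earlier lookups.
        """
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)              # blocks while polling the task
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)  # returns no task
        file_mgr = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_mgr, name=ds_path, datacenter=dc_ref)
        session.wait_for_task(task)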
[ 1083.439068] env[62522]: DEBUG oslo.service.loopingcall [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1083.441584] env[62522]: DEBUG nova.compute.manager [-] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1083.441678] env[62522]: DEBUG nova.network.neutron [-] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1083.628103] env[62522]: DEBUG nova.network.neutron [req-e484799f-bdce-46c6-a8d5-e4d630a13e7e req-cd3d1879-73bb-4a35-a873-51003ac1fb3d service nova] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Updated VIF entry in instance network info cache for port 3a727503-8134-40ef-91bb-d4d7be25f408. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1083.628633] env[62522]: DEBUG nova.network.neutron [req-e484799f-bdce-46c6-a8d5-e4d630a13e7e req-cd3d1879-73bb-4a35-a873-51003ac1fb3d service nova] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Updating instance_info_cache with network_info: [{"id": "3a727503-8134-40ef-91bb-d4d7be25f408", "address": "fa:16:3e:c6:fd:b2", "network": {"id": "217ced48-9485-4371-a119-08bf442b9bf5", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-471466315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aebe6f27d0ef4b5f8106bec8e21fa089", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a727503-81", "ovs_interfaceid": "3a727503-8134-40ef-91bb-d4d7be25f408", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.640470] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416113, 'name': CreateVM_Task, 'duration_secs': 0.525879} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.640645] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1083.642937] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1083.642937] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.642937] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1083.642937] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8224046-5f51-4296-bfa9-fe0c09fa071c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.649221] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Waiting for the task: (returnval){ [ 1083.649221] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d095ba-d618-4245-cdb9-0880e8b5b41a" [ 1083.649221] env[62522]: _type = "Task" [ 1083.649221] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.653411] env[62522]: DEBUG nova.network.neutron [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Updated VIF entry in instance network info cache for port 319c60b7-98f1-4cf2-8a9a-d2c1009599bf. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1083.653736] env[62522]: DEBUG nova.network.neutron [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Updating instance_info_cache with network_info: [{"id": "319c60b7-98f1-4cf2-8a9a-d2c1009599bf", "address": "fa:16:3e:5e:84:40", "network": {"id": "70e81afa-0eda-49c5-b072-e79b3c287468", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1715169983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff5da278d2be4ca983424c8291beadec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap319c60b7-98", "ovs_interfaceid": "319c60b7-98f1-4cf2-8a9a-d2c1009599bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.660843] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d095ba-d618-4245-cdb9-0880e8b5b41a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.780220] env[62522]: DEBUG oslo_vmware.api [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52aee90e-cd43-f2e6-3cdb-e6abc4368b89, 'name': SearchDatastore_Task, 'duration_secs': 0.011176} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.781069] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52efeab2-4eba-4cfc-a389-ea42fd55a3ec {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.791029] env[62522]: DEBUG oslo_vmware.api [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1083.791029] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526280a2-b99d-ca0f-b786-cff8109f1319" [ 1083.791029] env[62522]: _type = "Task" [ 1083.791029] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.803436] env[62522]: DEBUG oslo_vmware.api [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526280a2-b99d-ca0f-b786-cff8109f1319, 'name': SearchDatastore_Task, 'duration_secs': 0.012393} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.803705] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1083.803962] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 9337449d-5aff-4170-83ea-42fe2e9d1657/9337449d-5aff-4170-83ea-42fe2e9d1657.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1083.804253] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fa54ef74-a0f5-4b98-893a-32f11edb0975 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.812094] env[62522]: DEBUG oslo_vmware.api [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1083.812094] env[62522]: value = "task-2416117" [ 1083.812094] env[62522]: _type = "Task" [ 1083.812094] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.821267] env[62522]: DEBUG oslo_vmware.api [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416117, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.856737] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a4274098-47b8-471b-a453-972179a580ca tempest-ServersTestBootFromVolume-1262135564 tempest-ServersTestBootFromVolume-1262135564-project-member] Lock "35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.161s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.045241] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb96a521-bf50-4692-8fef-617e56ffab07 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.055434] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86218da-5fd2-4a50-a1ce-8d55895fe22d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.093644] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a91d5883-1286-4e07-ae7e-44a1bc478b3e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.103377] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b84a61-fa2a-49d4-9a3a-4504126537ed {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.119527] env[62522]: DEBUG nova.compute.provider_tree [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1084.135615] env[62522]: DEBUG oslo_concurrency.lockutils [req-e484799f-bdce-46c6-a8d5-e4d630a13e7e req-cd3d1879-73bb-4a35-a873-51003ac1fb3d service nova] Releasing lock "refresh_cache-783d9ae7-67f5-4c54-81a7-6715b762afb3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1084.157420] env[62522]: DEBUG oslo_concurrency.lockutils [req-57167260-d788-4b58-b000-7f665e1b98ee req-f73249ae-21b3-4ab6-8b46-beac18057e4b service nova] Releasing lock "refresh_cache-9337449d-5aff-4170-83ea-42fe2e9d1657" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1084.161657] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d095ba-d618-4245-cdb9-0880e8b5b41a, 'name': SearchDatastore_Task, 'duration_secs': 0.011927} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.162189] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1084.162331] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1084.162536] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1084.162685] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.162873] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1084.163181] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6402c67b-4a63-4f86-9816-c573ad8eb272 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.180758] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1084.181114] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1084.181682] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bf32c31-a93a-453f-a382-47dc45b59b98 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.192721] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Waiting for the task: (returnval){ [ 1084.192721] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d1fdac-15bc-c4ba-8a52-395a6b32f0a3" [ 1084.192721] env[62522]: _type = "Task" [ 1084.192721] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.203875] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d1fdac-15bc-c4ba-8a52-395a6b32f0a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.300646] env[62522]: DEBUG nova.network.neutron [-] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.320306] env[62522]: DEBUG nova.compute.manager [req-eab3c06d-5a74-439c-8197-682627138592 req-4d648c6e-2f8f-40a9-8e03-bd71f3383621 service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Received event network-vif-deleted-7e36641e-fc4a-4223-ab07-33dc49821168 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1084.320810] env[62522]: INFO nova.compute.manager [req-eab3c06d-5a74-439c-8197-682627138592 req-4d648c6e-2f8f-40a9-8e03-bd71f3383621 service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Neutron deleted interface 7e36641e-fc4a-4223-ab07-33dc49821168; detaching it from the instance and deleting it from the info cache [ 1084.321084] env[62522]: DEBUG nova.network.neutron [req-eab3c06d-5a74-439c-8197-682627138592 req-4d648c6e-2f8f-40a9-8e03-bd71f3383621 service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.330344] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "892926ef-3044-497c-8fc8-30cd298e4311" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1084.330577] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "892926ef-3044-497c-8fc8-30cd298e4311" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1084.331792] env[62522]: DEBUG oslo_vmware.api [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416117, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496205} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.332630] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 9337449d-5aff-4170-83ea-42fe2e9d1657/9337449d-5aff-4170-83ea-42fe2e9d1657.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1084.332847] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1084.333355] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bcfdc660-8447-4f6a-9cd5-f88f1939ee97 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.341983] env[62522]: DEBUG oslo_vmware.api [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1084.341983] env[62522]: value = "task-2416119" [ 1084.341983] env[62522]: _type = "Task" [ 1084.341983] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.351749] env[62522]: DEBUG oslo_vmware.api [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416119, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.623653] env[62522]: DEBUG nova.scheduler.client.report [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1084.703741] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d1fdac-15bc-c4ba-8a52-395a6b32f0a3, 'name': SearchDatastore_Task, 'duration_secs': 0.046693} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.704564] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56ccb204-9813-4815-b0b7-eb5d50d505d1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.711933] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Waiting for the task: (returnval){ [ 1084.711933] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bf4a5d-b694-2586-5e9c-a5ae53924641" [ 1084.711933] env[62522]: _type = "Task" [ 1084.711933] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.722661] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bf4a5d-b694-2586-5e9c-a5ae53924641, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.804621] env[62522]: INFO nova.compute.manager [-] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Took 1.36 seconds to deallocate network for instance. [ 1084.831852] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b60b3fbe-6383-462f-9a67-a0e53abad528 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.834273] env[62522]: DEBUG nova.compute.manager [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1084.849528] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ac25c2b-8e7e-423b-8a88-0ab14d9eae24 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.869220] env[62522]: DEBUG oslo_vmware.api [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416119, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07297} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.869879] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1084.870806] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b5b690-49f0-4ece-9324-6f445e75d7dd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.883689] env[62522]: DEBUG nova.compute.manager [req-eab3c06d-5a74-439c-8197-682627138592 req-4d648c6e-2f8f-40a9-8e03-bd71f3383621 service nova] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Detach interface failed, port_id=7e36641e-fc4a-4223-ab07-33dc49821168, reason: Instance 04a9d357-d094-487b-8f09-2f7e0c35f0d7 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1084.902078] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] 9337449d-5aff-4170-83ea-42fe2e9d1657/9337449d-5aff-4170-83ea-42fe2e9d1657.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1084.902690] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf5f17cf-57cd-4b95-b975-bdd58fc0553a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.925427] env[62522]: DEBUG oslo_vmware.api [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1084.925427] env[62522]: value = "task-2416120" [ 1084.925427] env[62522]: _type = "Task" [ 1084.925427] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.935124] env[62522]: DEBUG oslo_vmware.api [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416120, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.129164] env[62522]: DEBUG oslo_concurrency.lockutils [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.802s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1085.131632] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.081s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1085.132350] env[62522]: DEBUG nova.objects.instance [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lazy-loading 'resources' on Instance uuid c28d2907-5b59-4df8-91a8-4ba0f2047d89 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1085.156083] env[62522]: INFO nova.scheduler.client.report [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Deleted allocations for instance 522e778b-6e01-4554-a3eb-dd1efa7870de [ 1085.174774] env[62522]: DEBUG oslo_vmware.rw_handles [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c73dcf-3395-fbe8-60ba-d0b6e7aa0642/disk-0.vmdk. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1085.174774] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db364e80-a01e-44c4-9a99-759bae050925 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.184166] env[62522]: DEBUG oslo_vmware.rw_handles [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c73dcf-3395-fbe8-60ba-d0b6e7aa0642/disk-0.vmdk is in state: ready. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1085.184166] env[62522]: ERROR oslo_vmware.rw_handles [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c73dcf-3395-fbe8-60ba-d0b6e7aa0642/disk-0.vmdk due to incomplete transfer. 
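The ERROR entry just above is the read handle's cleanup path: the export lease is still in state "ready" when the handle is closed before the whole disk has been read, so it is aborted rather than completed, and the entries that follow show the VMDK read handle closing and the image upload to Glance finishing regardless. Separately, the recurring 'Lock "compute_resources" acquired/"released" ... waited/held' messages in this section come from oslo.concurrency's lockutils wrappers. A minimal sketch of the two lockutils forms that emit such entries follows; the function names and bodies are placeholders, and only the "compute_resources" lock name is taken from the log.

    from oslo_concurrency import lockutils

    # Decorator form: entering logs 'Lock "compute_resources" acquired by ...
    # :: waited Ns' and leaving logs '... "released" ... :: held Ns' at DEBUG,
    # as seen around the ResourceTracker calls above.
    @lockutils.synchronized('compute_resources')
    def update_usage(instance):
        pass  # placeholder -- the real body is Nova's resource tracker work

    # Context-manager form: produces 'Acquiring lock' / 'Acquired lock' /
    # 'Releasing lock' entries like the ones for the image-cache datastore
    # paths above.
    def fetch_cached_image(ds_path):
        with lockutils.lock(ds_path):
            pass  # placeholder for the copy/extend work done under the lock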
[ 1085.184326] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-641f9f1f-7430-4375-988d-d56308238610 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.194433] env[62522]: DEBUG oslo_vmware.rw_handles [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c73dcf-3395-fbe8-60ba-d0b6e7aa0642/disk-0.vmdk. {{(pid=62522) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1085.194788] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Uploaded image 4b03bdf9-70a0-4803-beb1-cbcd84dc8ac1 to the Glance image server {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1085.197526] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Destroying the VM {{(pid=62522) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1085.197899] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d2452a7f-f7e1-48fd-b65b-4275c489b93a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.207517] env[62522]: DEBUG oslo_vmware.api [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1085.207517] env[62522]: value = "task-2416121" [ 1085.207517] env[62522]: _type = "Task" [ 1085.207517] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.220145] env[62522]: DEBUG oslo_vmware.api [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416121, 'name': Destroy_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.228552] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bf4a5d-b694-2586-5e9c-a5ae53924641, 'name': SearchDatastore_Task, 'duration_secs': 0.012829} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.228805] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1085.229031] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 783d9ae7-67f5-4c54-81a7-6715b762afb3/783d9ae7-67f5-4c54-81a7-6715b762afb3.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1085.229455] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c921d990-94c7-4e31-a562-1ee1c3cb96e4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.239045] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Waiting for the task: (returnval){ [ 1085.239045] env[62522]: value = "task-2416122" [ 1085.239045] env[62522]: _type = "Task" [ 1085.239045] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.249106] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Task: {'id': task-2416122, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.314121] env[62522]: DEBUG oslo_concurrency.lockutils [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1085.358538] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1085.439024] env[62522]: DEBUG oslo_vmware.api [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416120, 'name': ReconfigVM_Task, 'duration_secs': 0.322921} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.439512] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Reconfigured VM instance instance-0000005b to attach disk [datastore2] 9337449d-5aff-4170-83ea-42fe2e9d1657/9337449d-5aff-4170-83ea-42fe2e9d1657.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1085.440256] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bb5aa443-dfb3-4ec7-b162-eab40722c115 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.450160] env[62522]: DEBUG oslo_vmware.api [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1085.450160] env[62522]: value = "task-2416123" [ 1085.450160] env[62522]: _type = "Task" [ 1085.450160] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.462971] env[62522]: DEBUG oslo_vmware.api [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416123, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.636130] env[62522]: DEBUG nova.objects.instance [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lazy-loading 'numa_topology' on Instance uuid c28d2907-5b59-4df8-91a8-4ba0f2047d89 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1085.669443] env[62522]: DEBUG oslo_concurrency.lockutils [None req-88351cb4-667d-4664-8e9b-120d7b10f064 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "522e778b-6e01-4554-a3eb-dd1efa7870de" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.727s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1085.720038] env[62522]: DEBUG oslo_vmware.api [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416121, 'name': Destroy_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.751450] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Task: {'id': task-2416122, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.960459] env[62522]: DEBUG oslo_vmware.api [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416123, 'name': Rename_Task, 'duration_secs': 0.493931} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.960766] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1085.960971] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-429a4d0d-cd12-4664-a28a-8d1179114fc3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.969282] env[62522]: DEBUG oslo_vmware.api [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1085.969282] env[62522]: value = "task-2416124" [ 1085.969282] env[62522]: _type = "Task" [ 1085.969282] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.978596] env[62522]: DEBUG oslo_vmware.api [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416124, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.139370] env[62522]: DEBUG nova.objects.base [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62522) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1086.219311] env[62522]: DEBUG oslo_vmware.api [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416121, 'name': Destroy_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.251173] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Task: {'id': task-2416122, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.604712} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.253664] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 783d9ae7-67f5-4c54-81a7-6715b762afb3/783d9ae7-67f5-4c54-81a7-6715b762afb3.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1086.253910] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1086.254584] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d8f7c24b-028a-4eef-9335-a956c88d658e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.262911] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Waiting for the task: (returnval){ [ 1086.262911] env[62522]: value = "task-2416125" [ 1086.262911] env[62522]: _type = "Task" [ 1086.262911] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.275061] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Task: {'id': task-2416125, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.309592] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a96e50b4-06f8-46d1-98db-6d16c57fe3ae {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.323998] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c0fcee-69f1-489e-b7e7-0e0674affd3f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.360308] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0735cbd-d975-4368-bdd7-09c58e4b04f5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.369507] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-439da8f7-f6d2-492e-8371-d25fecde75de {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.385338] env[62522]: DEBUG nova.compute.provider_tree [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1086.480686] env[62522]: DEBUG oslo_vmware.api [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416124, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.675873] env[62522]: DEBUG oslo_concurrency.lockutils [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "5426087f-3dd0-4796-aa46-6020a3bda4f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.676116] env[62522]: DEBUG oslo_concurrency.lockutils [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "5426087f-3dd0-4796-aa46-6020a3bda4f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.719896] env[62522]: DEBUG oslo_vmware.api [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416121, 'name': Destroy_Task, 'duration_secs': 1.386544} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.720058] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Destroyed the VM [ 1086.720352] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Deleting Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1086.720625] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-1ebdcb53-2a51-42b1-8fc2-94b2673f003d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.728760] env[62522]: DEBUG oslo_vmware.api [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1086.728760] env[62522]: value = "task-2416127" [ 1086.728760] env[62522]: _type = "Task" [ 1086.728760] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.737653] env[62522]: DEBUG oslo_vmware.api [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416127, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.774009] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Task: {'id': task-2416125, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075921} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.774318] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1086.775160] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-297c21f2-b541-4944-9ddb-c1c284909ef6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.798216] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] 783d9ae7-67f5-4c54-81a7-6715b762afb3/783d9ae7-67f5-4c54-81a7-6715b762afb3.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1086.798531] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45b17953-e292-445c-9d98-974bc3ab24ee {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.819841] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Waiting for the task: (returnval){ [ 1086.819841] env[62522]: value = "task-2416128" [ 1086.819841] env[62522]: _type = "Task" [ 1086.819841] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.828765] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Task: {'id': task-2416128, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.889015] env[62522]: DEBUG nova.scheduler.client.report [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1086.981162] env[62522]: DEBUG oslo_vmware.api [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416124, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.179158] env[62522]: DEBUG nova.compute.manager [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1087.239304] env[62522]: DEBUG oslo_vmware.api [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416127, 'name': RemoveSnapshot_Task} progress is 65%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.275527] env[62522]: DEBUG oslo_concurrency.lockutils [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Acquiring lock "92604d35-7e59-45b0-9dce-32e515703936" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1087.275763] env[62522]: DEBUG oslo_concurrency.lockutils [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Lock "92604d35-7e59-45b0-9dce-32e515703936" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1087.333193] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Task: {'id': task-2416128, 'name': ReconfigVM_Task, 'duration_secs': 0.407824} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.333648] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Reconfigured VM instance instance-0000005c to attach disk [datastore2] 783d9ae7-67f5-4c54-81a7-6715b762afb3/783d9ae7-67f5-4c54-81a7-6715b762afb3.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1087.334189] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-73e73ecb-944d-4389-9174-35850c728525 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.343680] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Waiting for the task: (returnval){ [ 1087.343680] env[62522]: value = "task-2416129" [ 1087.343680] env[62522]: _type = "Task" [ 1087.343680] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.354581] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Task: {'id': task-2416129, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.393736] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.262s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1087.396236] env[62522]: DEBUG oslo_concurrency.lockutils [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.927s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1087.397718] env[62522]: INFO nova.compute.claims [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1087.482179] env[62522]: DEBUG oslo_vmware.api [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416124, 'name': PowerOnVM_Task, 'duration_secs': 1.425013} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.482526] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1087.482873] env[62522]: INFO nova.compute.manager [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Took 10.16 seconds to spawn the instance on the hypervisor. 
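Note: the ExtendVirtualDisk_Task / ReconfigVM_Task / Rename_Task / PowerOnVM_Task entries above follow the usual oslo.vmware pattern — the driver invokes a vCenter task method through the API session and then blocks in wait_for_task(), whose poll loop emits the "progress is N%" and "completed successfully" lines seen here. The following is a minimal illustrative sketch only, not the exact Nova code path; the vCenter host, credentials and managed-object reference value are placeholders.

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder connection details -- a real deployment reads these from nova.conf.
    session = vmware_api.VMwareAPISession(
        'vc.example.org', 'administrator', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Build a managed-object reference for an existing VM (the value is a placeholder;
    # Nova normally resolves it via a PropertyCollector lookup).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Start the vCenter task and block until it finishes; wait_for_task() polls the
    # task object, logs its progress, and raises if the task ends in an error state.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)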
[ 1087.483252] env[62522]: DEBUG nova.compute.manager [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1087.484045] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501d6543-ca7f-4ccd-ae10-f53a96320eae {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.705365] env[62522]: DEBUG oslo_concurrency.lockutils [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1087.739432] env[62522]: DEBUG oslo_vmware.api [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416127, 'name': RemoveSnapshot_Task, 'duration_secs': 0.61409} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.739980] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Deleted Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1087.739980] env[62522]: INFO nova.compute.manager [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Took 16.39 seconds to snapshot the instance on the hypervisor. [ 1087.779854] env[62522]: DEBUG nova.compute.manager [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1087.782632] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "cd69a052-369b-4809-baf0-a1aec44f4ab5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1087.782858] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "cd69a052-369b-4809-baf0-a1aec44f4ab5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1087.783070] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "cd69a052-369b-4809-baf0-a1aec44f4ab5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1087.783330] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "cd69a052-369b-4809-baf0-a1aec44f4ab5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1087.783422] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "cd69a052-369b-4809-baf0-a1aec44f4ab5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1087.785873] env[62522]: INFO nova.compute.manager [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Terminating instance [ 1087.857620] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Task: {'id': task-2416129, 'name': Rename_Task, 'duration_secs': 0.255078} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.857856] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1087.858348] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2b573ed3-665b-4527-b73b-0f24da8ed9d8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.867147] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Waiting for the task: (returnval){ [ 1087.867147] env[62522]: value = "task-2416130" [ 1087.867147] env[62522]: _type = "Task" [ 1087.867147] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.875759] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Task: {'id': task-2416130, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.905819] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a60ac9f3-1f61-466a-a6b4-e0e8f8d34eca tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 28.906s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1087.907497] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 5.265s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1087.907497] env[62522]: INFO nova.compute.manager [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Unshelving [ 1088.004469] env[62522]: INFO nova.compute.manager [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Took 16.18 seconds to build instance. [ 1088.289507] env[62522]: DEBUG nova.compute.manager [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1088.289902] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1088.293275] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00adc659-8c24-48e8-b170-c7e5ed06853c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.305291] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1088.305563] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2ee022b2-b051-4d0d-95aa-64a40e16c840 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.311392] env[62522]: DEBUG nova.compute.manager [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Found 3 images (rotation: 2) {{(pid=62522) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1088.311661] env[62522]: DEBUG nova.compute.manager [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Rotating out 1 backups {{(pid=62522) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5025}} [ 1088.311868] env[62522]: DEBUG nova.compute.manager [None req-bc7a4374-a5f7-4ac0-98fd-7577dd6f6d8e tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Deleting image 0f0df71d-7d6e-452b-9dfd-236a14f4f7a2 {{(pid=62522) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5030}} [ 1088.316164] env[62522]: DEBUG oslo_concurrency.lockutils [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1088.331088] env[62522]: DEBUG oslo_vmware.api [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 1088.331088] env[62522]: value = "task-2416131" [ 1088.331088] env[62522]: _type = "Task" [ 1088.331088] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.343773] env[62522]: DEBUG oslo_vmware.api [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2416131, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.377780] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Task: {'id': task-2416130, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.507292] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87db3d60-beb6-4d14-a400-d271c31934e5 tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "9337449d-5aff-4170-83ea-42fe2e9d1657" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.702s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1088.610681] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ba4162d-f327-47de-826b-3251605bca08 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.620399] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23cb2dd6-3585-4892-8cab-227a6dc99e69 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.653931] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b717fe67-6883-4e73-8060-fc62968545f7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.662819] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f065fc9-9671-45aa-ac77-878cf292781a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.677345] env[62522]: DEBUG nova.compute.provider_tree [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1088.822887] env[62522]: DEBUG nova.compute.manager [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1088.824032] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e42860ee-760a-49f2-bed2-a3b0b74b9291 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.848987] env[62522]: DEBUG oslo_vmware.api [None req-8d4c2766-353b-4f3c-8d92-97741197d169 
tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2416131, 'name': PowerOffVM_Task, 'duration_secs': 0.237706} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.849105] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1088.849208] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1088.850401] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-371fd266-f270-4b61-bb79-c0cfae397602 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.879512] env[62522]: DEBUG oslo_vmware.api [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Task: {'id': task-2416130, 'name': PowerOnVM_Task, 'duration_secs': 0.777202} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.879871] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1088.880112] env[62522]: INFO nova.compute.manager [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Took 9.08 seconds to spawn the instance on the hypervisor. 
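Note: the recurring 'Acquiring lock ... / Lock ... acquired ... waited N s / ... "released" ... held N s' entries come from oslo.concurrency's lockutils (the inner() frames in lockutils.py). A minimal illustrative sketch of the pattern that produces them; the lock names and function below are invented for the example and are not taken from this log.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('instance-uuid-goes-here')
    def _locked_do_build(instance):
        # Runs with the named lock held; the synchronized() wrapper logs how long
        # the caller waited for the lock and how long the lock was held.
        return instance

    # The same primitive is available as a context manager for ad-hoc critical
    # sections, e.g. around resource-tracker updates:
    with lockutils.lock('compute_resources'):
        pass  # update claims/usage while the lock is held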
[ 1088.880307] env[62522]: DEBUG nova.compute.manager [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1088.881275] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3928cae-abbd-4106-b54f-57e334ed6f20 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.922027] env[62522]: DEBUG nova.compute.utils [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1088.928700] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1088.928993] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1088.929274] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Deleting the datastore file [datastore1] cd69a052-369b-4809-baf0-a1aec44f4ab5 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1088.930403] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-51542812-84c6-4cc6-b56c-a4b24b1e7e60 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.940678] env[62522]: DEBUG oslo_vmware.api [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 1088.940678] env[62522]: value = "task-2416134" [ 1088.940678] env[62522]: _type = "Task" [ 1088.940678] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.950672] env[62522]: DEBUG oslo_vmware.api [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2416134, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.180915] env[62522]: DEBUG nova.scheduler.client.report [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1089.219731] env[62522]: DEBUG oslo_concurrency.lockutils [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "9337449d-5aff-4170-83ea-42fe2e9d1657" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.219863] env[62522]: DEBUG oslo_concurrency.lockutils [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "9337449d-5aff-4170-83ea-42fe2e9d1657" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.220155] env[62522]: DEBUG oslo_concurrency.lockutils [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "9337449d-5aff-4170-83ea-42fe2e9d1657-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.220288] env[62522]: DEBUG oslo_concurrency.lockutils [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "9337449d-5aff-4170-83ea-42fe2e9d1657-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.220448] env[62522]: DEBUG oslo_concurrency.lockutils [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "9337449d-5aff-4170-83ea-42fe2e9d1657-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.222720] env[62522]: INFO nova.compute.manager [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Terminating instance [ 1089.342441] env[62522]: INFO nova.compute.manager [None req-3850bc78-86f2-4f61-a99f-a799617afee4 
tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] instance snapshotting [ 1089.343135] env[62522]: DEBUG nova.objects.instance [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lazy-loading 'flavor' on Instance uuid 7f8a8270-5014-446c-aa42-ea0b4079e5a9 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1089.400254] env[62522]: INFO nova.compute.manager [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Took 14.66 seconds to build instance. [ 1089.425970] env[62522]: INFO nova.virt.block_device [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Booting with volume 1a6a964c-b9d4-4849-bb10-c20d35c6b3ec at /dev/sdb [ 1089.455651] env[62522]: DEBUG oslo_vmware.api [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2416134, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.320706} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.456079] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1089.456403] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1089.456715] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1089.458031] env[62522]: INFO nova.compute.manager [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1089.458031] env[62522]: DEBUG oslo.service.loopingcall [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1089.458031] env[62522]: DEBUG nova.compute.manager [-] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1089.458031] env[62522]: DEBUG nova.network.neutron [-] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1089.464617] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-12af5b75-7785-4e45-8d6f-eab6e55ba28c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.474816] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52e51123-1cbb-4a3b-8b56-33b02892fe06 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.507275] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0394ab9e-821a-4160-b38c-4d9657dce24a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.517415] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1ac6ff0-276c-4b22-89b6-f61ce169ac15 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.547851] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e1fc08e-532d-41a1-9ef4-4d271b44ddea {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.555505] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe91e52-bc8a-44c5-87fe-b5d8c8dcb95e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.573375] env[62522]: DEBUG nova.virt.block_device [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Updating existing volume attachment record: 5ae0885a-56b8-48d7-bd72-71043df2bc65 {{(pid=62522) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1089.689024] env[62522]: DEBUG oslo_concurrency.lockutils [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.290s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.689024] env[62522]: DEBUG nova.compute.manager [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1089.689993] env[62522]: DEBUG oslo_concurrency.lockutils [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.376s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.690364] env[62522]: DEBUG nova.objects.instance [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lazy-loading 'resources' on Instance uuid 04a9d357-d094-487b-8f09-2f7e0c35f0d7 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1089.729919] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Acquiring lock "783d9ae7-67f5-4c54-81a7-6715b762afb3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.731607] env[62522]: DEBUG nova.compute.manager [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1089.731843] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1089.732934] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eea6f292-8ba6-42d3-9407-432d259ff988 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.743420] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1089.744471] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4d943587-3007-4727-bcff-671961f211e1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.746891] env[62522]: DEBUG nova.compute.manager [req-b3a80d62-2906-478a-bc0a-e51d636636ec req-7fb77fc1-bef7-48e2-9458-53ccbcb60266 service nova] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Received event network-vif-deleted-6683db6f-edf5-4273-b92a-cb688e7baa82 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1089.747096] env[62522]: INFO nova.compute.manager [req-b3a80d62-2906-478a-bc0a-e51d636636ec req-7fb77fc1-bef7-48e2-9458-53ccbcb60266 service nova] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Neutron deleted interface 
6683db6f-edf5-4273-b92a-cb688e7baa82; detaching it from the instance and deleting it from the info cache [ 1089.747270] env[62522]: DEBUG nova.network.neutron [req-b3a80d62-2906-478a-bc0a-e51d636636ec req-7fb77fc1-bef7-48e2-9458-53ccbcb60266 service nova] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.754099] env[62522]: DEBUG oslo_vmware.api [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1089.754099] env[62522]: value = "task-2416136" [ 1089.754099] env[62522]: _type = "Task" [ 1089.754099] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.763417] env[62522]: DEBUG oslo_vmware.api [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416136, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.849354] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bee4d3d-c1f9-40eb-9069-1d6302b08991 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.870363] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d212b5-1098-4f2c-af1f-c098db0c4b9d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.902162] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b72fd0e2-3e5e-4dd3-8e18-033e814ef206 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Lock "783d9ae7-67f5-4c54-81a7-6715b762afb3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.198s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.902557] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Lock "783d9ae7-67f5-4c54-81a7-6715b762afb3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.173s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.902793] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Acquiring lock "783d9ae7-67f5-4c54-81a7-6715b762afb3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.903062] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Lock "783d9ae7-67f5-4c54-81a7-6715b762afb3-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.903297] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Lock "783d9ae7-67f5-4c54-81a7-6715b762afb3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.905400] env[62522]: INFO nova.compute.manager [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Terminating instance [ 1090.195927] env[62522]: DEBUG nova.compute.utils [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1090.200725] env[62522]: DEBUG nova.compute.manager [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1090.200725] env[62522]: DEBUG nova.network.neutron [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1090.228749] env[62522]: DEBUG nova.network.neutron [-] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.247362] env[62522]: DEBUG nova.policy [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f26eeb125397426baca60d80d635c4b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a06421250694a98b13ff34ad816dc75', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1090.251731] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-63304f5e-07d9-4333-8515-c764bc1a74ab {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.266342] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bbaa7a0-1e56-4fa8-b5ae-3ffc692f4450 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.277308] env[62522]: DEBUG oslo_vmware.api 
[None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416136, 'name': PowerOffVM_Task, 'duration_secs': 0.415634} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.280674] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1090.280851] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1090.281643] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-26f0e399-2186-4358-be0c-0b8be8373a88 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.303496] env[62522]: DEBUG nova.compute.manager [req-b3a80d62-2906-478a-bc0a-e51d636636ec req-7fb77fc1-bef7-48e2-9458-53ccbcb60266 service nova] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Detach interface failed, port_id=6683db6f-edf5-4273-b92a-cb688e7baa82, reason: Instance cd69a052-369b-4809-baf0-a1aec44f4ab5 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1090.356665] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1090.356929] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1090.357135] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Deleting the datastore file [datastore2] 9337449d-5aff-4170-83ea-42fe2e9d1657 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1090.357410] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-904c5137-9407-42b4-8c84-29ee567d0c87 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.365746] env[62522]: DEBUG oslo_vmware.api [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for the task: (returnval){ [ 1090.365746] env[62522]: value = "task-2416140" [ 1090.365746] env[62522]: _type = "Task" [ 1090.365746] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.375597] env[62522]: DEBUG oslo_vmware.api [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416140, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.380952] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Creating Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1090.381600] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-355dfd51-e2ab-46fb-81c6-684b733077f4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.394421] env[62522]: DEBUG oslo_vmware.api [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1090.394421] env[62522]: value = "task-2416141" [ 1090.394421] env[62522]: _type = "Task" [ 1090.394421] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.401844] env[62522]: DEBUG oslo_vmware.api [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416141, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.408937] env[62522]: DEBUG nova.compute.manager [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1090.409380] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1090.410510] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a1e820-dc2a-40fb-b1cb-dcf28da2cc50 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.419186] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1090.419561] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a5e5aa74-f80a-45c4-82bf-bd3195c4ba5f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.429992] env[62522]: DEBUG oslo_vmware.api [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Waiting for the task: (returnval){ [ 1090.429992] env[62522]: value = "task-2416142" [ 1090.429992] env[62522]: _type = "Task" [ 1090.429992] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.435327] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eae33d2-4cf5-4dd6-b3a2-688bbd37b8b6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.447252] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d548363-cb11-4a62-ba13-320f79a286f6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.450942] env[62522]: DEBUG oslo_vmware.api [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Task: {'id': task-2416142, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.482161] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6704b9c4-6c53-46da-976f-a4e8b877f6ef {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.492293] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a840eb-679c-4f43-a7f8-ae543cabc961 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.509159] env[62522]: DEBUG nova.compute.provider_tree [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1090.563963] env[62522]: DEBUG nova.network.neutron [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Successfully created port: e4e04bfe-9037-47b9-beb1-059047be1cde {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1090.704363] env[62522]: DEBUG nova.compute.manager [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1090.731138] env[62522]: INFO nova.compute.manager [-] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Took 1.27 seconds to deallocate network for instance. [ 1090.878538] env[62522]: DEBUG oslo_vmware.api [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Task: {'id': task-2416140, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.221083} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.878800] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1090.878979] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1090.879168] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1090.879344] env[62522]: INFO nova.compute.manager [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1090.879593] env[62522]: DEBUG oslo.service.loopingcall [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1090.879784] env[62522]: DEBUG nova.compute.manager [-] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1090.879875] env[62522]: DEBUG nova.network.neutron [-] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1090.901873] env[62522]: DEBUG oslo_vmware.api [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416141, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.940053] env[62522]: DEBUG oslo_vmware.api [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Task: {'id': task-2416142, 'name': PowerOffVM_Task, 'duration_secs': 0.214341} completed successfully.
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.940343] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1090.940511] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1090.940764] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f559c9e9-7f3f-43e2-bd83-e3fe3a711b22 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.004229] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1091.004525] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1091.004616] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Deleting the datastore file [datastore2] 783d9ae7-67f5-4c54-81a7-6715b762afb3 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1091.004838] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e7aee97-e350-4761-af72-ba2cddcd2b6f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.013113] env[62522]: DEBUG nova.scheduler.client.report [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1091.016378] env[62522]: DEBUG oslo_vmware.api [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Waiting for the task: (returnval){ [ 1091.016378] env[62522]: value = "task-2416145" [ 1091.016378] env[62522]: _type = "Task" [ 
1091.016378] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.025272] env[62522]: DEBUG oslo_vmware.api [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Task: {'id': task-2416145, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.240571] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.302942] env[62522]: DEBUG nova.compute.manager [req-f1e5d78a-d85f-4db2-b4c6-2217534c4b8d req-4dbd4e9c-c657-482c-9add-b3a9b2364090 service nova] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Received event network-vif-deleted-319c60b7-98f1-4cf2-8a9a-d2c1009599bf {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1091.302942] env[62522]: INFO nova.compute.manager [req-f1e5d78a-d85f-4db2-b4c6-2217534c4b8d req-4dbd4e9c-c657-482c-9add-b3a9b2364090 service nova] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Neutron deleted interface 319c60b7-98f1-4cf2-8a9a-d2c1009599bf; detaching it from the instance and deleting it from the info cache [ 1091.302942] env[62522]: DEBUG nova.network.neutron [req-f1e5d78a-d85f-4db2-b4c6-2217534c4b8d req-4dbd4e9c-c657-482c-9add-b3a9b2364090 service nova] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.403662] env[62522]: DEBUG oslo_vmware.api [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416141, 'name': CreateSnapshot_Task, 'duration_secs': 0.749606} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.403920] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Created Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1091.404651] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a953c0-c5f6-499f-b3d8-2c610fd62507 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.518187] env[62522]: DEBUG oslo_concurrency.lockutils [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.828s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.522188] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.163s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.523906] env[62522]: INFO nova.compute.claims [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1091.536170] env[62522]: DEBUG oslo_vmware.api [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Task: {'id': task-2416145, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137575} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.536423] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1091.536691] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1091.536895] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1091.537068] env[62522]: INFO nova.compute.manager [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1091.537580] env[62522]: DEBUG oslo.service.loopingcall [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1091.537847] env[62522]: DEBUG nova.compute.manager [-] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1091.537912] env[62522]: DEBUG nova.network.neutron [-] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1091.543713] env[62522]: INFO nova.scheduler.client.report [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Deleted allocations for instance 04a9d357-d094-487b-8f09-2f7e0c35f0d7 [ 1091.715685] env[62522]: DEBUG nova.compute.manager [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Start spawning the instance on the hypervisor.
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1091.741431] env[62522]: DEBUG nova.virt.hardware [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1091.741652] env[62522]: DEBUG nova.virt.hardware [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1091.741805] env[62522]: DEBUG nova.virt.hardware [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1091.742009] env[62522]: DEBUG nova.virt.hardware [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1091.742206] env[62522]: DEBUG nova.virt.hardware [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1091.742360] env[62522]: DEBUG nova.virt.hardware [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1091.742569] env[62522]: DEBUG nova.virt.hardware [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1091.742730] env[62522]: DEBUG nova.virt.hardware [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1091.742890] env[62522]: DEBUG nova.virt.hardware [None 
req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1091.743087] env[62522]: DEBUG nova.virt.hardware [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1091.743278] env[62522]: DEBUG nova.virt.hardware [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1091.744336] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50a36711-89ce-4259-8e67-a3c5316781e6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.753729] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dac821b3-b16d-479d-9d06-a2a51d141fe3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.774420] env[62522]: DEBUG nova.network.neutron [-] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.805452] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f27e270a-26f6-406c-9cc0-41cc265790e2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.816231] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f90359f1-d75a-4fc1-9e13-a774e40aae5a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.853742] env[62522]: DEBUG nova.compute.manager [req-f1e5d78a-d85f-4db2-b4c6-2217534c4b8d req-4dbd4e9c-c657-482c-9add-b3a9b2364090 service nova] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Detach interface failed, port_id=319c60b7-98f1-4cf2-8a9a-d2c1009599bf, reason: Instance 9337449d-5aff-4170-83ea-42fe2e9d1657 could not be found. 
{{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1091.921609] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Creating linked-clone VM from snapshot {{(pid=62522) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1091.921979] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-caef1ee5-8d93-49a3-8e42-f5f8346f3805 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.933023] env[62522]: DEBUG oslo_vmware.api [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1091.933023] env[62522]: value = "task-2416146" [ 1091.933023] env[62522]: _type = "Task" [ 1091.933023] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.944209] env[62522]: DEBUG oslo_vmware.api [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416146, 'name': CloneVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.981239] env[62522]: DEBUG nova.compute.manager [req-762c8d14-33b0-4d7f-a0ee-f08ed50ff134 req-e5efe19e-504d-4a3c-b9ec-3750c6f491b5 service nova] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Received event network-vif-plugged-e4e04bfe-9037-47b9-beb1-059047be1cde {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1091.981488] env[62522]: DEBUG oslo_concurrency.lockutils [req-762c8d14-33b0-4d7f-a0ee-f08ed50ff134 req-e5efe19e-504d-4a3c-b9ec-3750c6f491b5 service nova] Acquiring lock "e369d9e1-1345-4038-b5f3-f816fe767a72-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.981664] env[62522]: DEBUG oslo_concurrency.lockutils [req-762c8d14-33b0-4d7f-a0ee-f08ed50ff134 req-e5efe19e-504d-4a3c-b9ec-3750c6f491b5 service nova] Lock "e369d9e1-1345-4038-b5f3-f816fe767a72-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.981826] env[62522]: DEBUG oslo_concurrency.lockutils [req-762c8d14-33b0-4d7f-a0ee-f08ed50ff134 req-e5efe19e-504d-4a3c-b9ec-3750c6f491b5 service nova] Lock "e369d9e1-1345-4038-b5f3-f816fe767a72-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.982385] env[62522]: DEBUG nova.compute.manager [req-762c8d14-33b0-4d7f-a0ee-f08ed50ff134 req-e5efe19e-504d-4a3c-b9ec-3750c6f491b5 service nova] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] No waiting events found dispatching network-vif-plugged-e4e04bfe-9037-47b9-beb1-059047be1cde {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}}
[ 1091.982685] env[62522]: WARNING nova.compute.manager [req-762c8d14-33b0-4d7f-a0ee-f08ed50ff134 req-e5efe19e-504d-4a3c-b9ec-3750c6f491b5 service nova] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Received unexpected event network-vif-plugged-e4e04bfe-9037-47b9-beb1-059047be1cde for instance with vm_state building and task_state spawning. [ 1092.051255] env[62522]: DEBUG oslo_concurrency.lockutils [None req-16e884e5-77ec-4a44-a36d-f891691dab72 tempest-ServersNegativeTestJSON-922500168 tempest-ServersNegativeTestJSON-922500168-project-member] Lock "04a9d357-d094-487b-8f09-2f7e0c35f0d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 10.276s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1092.100446] env[62522]: DEBUG nova.network.neutron [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Successfully updated port: e4e04bfe-9037-47b9-beb1-059047be1cde {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1092.279045] env[62522]: INFO nova.compute.manager [-] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Took 1.40 seconds to deallocate network for instance. [ 1092.356769] env[62522]: DEBUG nova.network.neutron [-] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.442551] env[62522]: DEBUG oslo_vmware.api [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416146, 'name': CloneVM_Task} progress is 94%.
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.602025] env[62522]: DEBUG oslo_concurrency.lockutils [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "refresh_cache-e369d9e1-1345-4038-b5f3-f816fe767a72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1092.602243] env[62522]: DEBUG oslo_concurrency.lockutils [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired lock "refresh_cache-e369d9e1-1345-4038-b5f3-f816fe767a72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.602340] env[62522]: DEBUG nova.network.neutron [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1092.716013] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4efa674c-e1b7-4c35-af74-4378f4497c0d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.726793] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b84b662-21b5-4e27-b345-242d51ae419f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.759743] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fda1b6a-1afe-434f-bc32-7a5690dcd620 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.768564] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8e12882-ca83-4cc7-b654-3fb5813c8847 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.783302] env[62522]: DEBUG nova.compute.provider_tree [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1092.788910] env[62522]: DEBUG oslo_concurrency.lockutils [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.862397] env[62522]: INFO nova.compute.manager [-] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Took 1.32 seconds to deallocate network for instance. [ 1092.942896] env[62522]: DEBUG oslo_vmware.api [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416146, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.148909] env[62522]: DEBUG nova.network.neutron [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1093.288129] env[62522]: DEBUG nova.scheduler.client.report [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1093.368861] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.407611] env[62522]: DEBUG nova.network.neutron [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Updating instance_info_cache with network_info: [{"id": "e4e04bfe-9037-47b9-beb1-059047be1cde", "address": "fa:16:3e:c8:ba:dd", "network": {"id": "21d0ca22-5509-4f9b-b167-f9fde2dac808", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-547933771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a06421250694a98b13ff34ad816dc75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4e04bfe-90", "ovs_interfaceid": "e4e04bfe-9037-47b9-beb1-059047be1cde", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.414017] env[62522]: DEBUG nova.compute.manager [req-77130a67-c0b9-4ad4-9ccd-d00f94a334e6 req-d1dc168e-bd51-40d7-a6fe-1b25fa7ee76f service nova] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Received event network-vif-deleted-3a727503-8134-40ef-91bb-d4d7be25f408 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} 
[ 1093.444973] env[62522]: DEBUG oslo_vmware.api [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416146, 'name': CloneVM_Task, 'duration_secs': 1.052522} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.445271] env[62522]: INFO nova.virt.vmwareapi.vmops [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Created linked-clone VM from snapshot [ 1093.446037] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01b39f2e-259a-4265-a3a4-5a6a6bde542a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.455554] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Uploading image 2e900327-5daf-401c-97e2-b519e8d896d0 {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1093.486248] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1093.486248] env[62522]: value = "vm-489821" [ 1093.486248] env[62522]: _type = "VirtualMachine" [ 1093.486248] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1093.486723] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-8bd2dcbe-8bb1-47ef-a53b-e1ebbb99fcb2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.497153] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lease: (returnval){ [ 1093.497153] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5244d41c-807e-4376-caf7-c5d7a941433d" [ 1093.497153] env[62522]: _type = "HttpNfcLease" [ 1093.497153] env[62522]: } obtained for exporting VM: (result){ [ 1093.497153] env[62522]: value = "vm-489821" [ 1093.497153] env[62522]: _type = "VirtualMachine" [ 1093.497153] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1093.497153] env[62522]: DEBUG oslo_vmware.api [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the lease: (returnval){ [ 1093.497153] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5244d41c-807e-4376-caf7-c5d7a941433d" [ 1093.497153] env[62522]: _type = "HttpNfcLease" [ 1093.497153] env[62522]: } to be ready. 
{{(pid=62522) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1093.504633] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1093.504633] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5244d41c-807e-4376-caf7-c5d7a941433d" [ 1093.504633] env[62522]: _type = "HttpNfcLease" [ 1093.504633] env[62522]: } is initializing. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1093.624831] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "3c4c395c-0625-4569-990d-e2d4ad162c14" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.625351] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "3c4c395c-0625-4569-990d-e2d4ad162c14" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.625599] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "3c4c395c-0625-4569-990d-e2d4ad162c14-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.625892] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "3c4c395c-0625-4569-990d-e2d4ad162c14-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.626127] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "3c4c395c-0625-4569-990d-e2d4ad162c14-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.628509] env[62522]: INFO nova.compute.manager [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Terminating instance [ 1093.801033] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.280s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.801849] env[62522]: DEBUG nova.compute.manager [None
req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1093.805245] env[62522]: DEBUG oslo_concurrency.lockutils [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.100s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.806769] env[62522]: INFO nova.compute.claims [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1093.912537] env[62522]: DEBUG oslo_concurrency.lockutils [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Releasing lock "refresh_cache-e369d9e1-1345-4038-b5f3-f816fe767a72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1093.913209] env[62522]: DEBUG nova.compute.manager [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Instance network_info: |[{"id": "e4e04bfe-9037-47b9-beb1-059047be1cde", "address": "fa:16:3e:c8:ba:dd", "network": {"id": "21d0ca22-5509-4f9b-b167-f9fde2dac808", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-547933771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a06421250694a98b13ff34ad816dc75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4e04bfe-90", "ovs_interfaceid": "e4e04bfe-9037-47b9-beb1-059047be1cde", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1093.913356] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:ba:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24144f5a-050a-4f1e-8d8c-774dc16dc791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e4e04bfe-9037-47b9-beb1-059047be1cde', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1093.920622] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Creating folder: Project (4a06421250694a98b13ff34ad816dc75). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1093.920892] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-84a44ddf-ae4b-4218-a89e-9c22d7a235e8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.934188] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Created folder: Project (4a06421250694a98b13ff34ad816dc75) in parent group-v489562. [ 1093.934457] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Creating folder: Instances. Parent ref: group-v489822. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1093.934710] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48567bc3-703f-4439-a199-6aea6dd60e69 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.946334] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Created folder: Instances in parent group-v489822. [ 1093.946605] env[62522]: DEBUG oslo.service.loopingcall [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1093.946751] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1093.946962] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ebedfe7b-478e-4b40-aad4-21c5740040e3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.967596] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1093.967596] env[62522]: value = "task-2416152" [ 1093.967596] env[62522]: _type = "Task" [ 1093.967596] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.975902] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416152, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.006120] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1094.006120] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5244d41c-807e-4376-caf7-c5d7a941433d" [ 1094.006120] env[62522]: _type = "HttpNfcLease" [ 1094.006120] env[62522]: } is ready. 
{{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1094.006120] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1094.006120] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5244d41c-807e-4376-caf7-c5d7a941433d" [ 1094.006120] env[62522]: _type = "HttpNfcLease" [ 1094.006120] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1094.006120] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af2ba874-dc9d-48fa-8638-2c3cd7d8f223 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.014766] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ac7f1d-a176-5526-4baa-18bc3de24bf2/disk-0.vmdk from lease info. {{(pid=62522) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1094.015323] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ac7f1d-a176-5526-4baa-18bc3de24bf2/disk-0.vmdk for reading. {{(pid=62522) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1094.020243] env[62522]: DEBUG nova.compute.manager [req-5f0bb6da-d105-4de5-9da0-17896284f560 req-569dd12b-227e-4bce-8c0a-f36f9c273257 service nova] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Received event network-changed-e4e04bfe-9037-47b9-beb1-059047be1cde {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1094.020599] env[62522]: DEBUG nova.compute.manager [req-5f0bb6da-d105-4de5-9da0-17896284f560 req-569dd12b-227e-4bce-8c0a-f36f9c273257 service nova] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Refreshing instance network info cache due to event network-changed-e4e04bfe-9037-47b9-beb1-059047be1cde. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1094.020997] env[62522]: DEBUG oslo_concurrency.lockutils [req-5f0bb6da-d105-4de5-9da0-17896284f560 req-569dd12b-227e-4bce-8c0a-f36f9c273257 service nova] Acquiring lock "refresh_cache-e369d9e1-1345-4038-b5f3-f816fe767a72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1094.021194] env[62522]: DEBUG oslo_concurrency.lockutils [req-5f0bb6da-d105-4de5-9da0-17896284f560 req-569dd12b-227e-4bce-8c0a-f36f9c273257 service nova] Acquired lock "refresh_cache-e369d9e1-1345-4038-b5f3-f816fe767a72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.021480] env[62522]: DEBUG nova.network.neutron [req-5f0bb6da-d105-4de5-9da0-17896284f560 req-569dd12b-227e-4bce-8c0a-f36f9c273257 service nova] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Refreshing network info cache for port e4e04bfe-9037-47b9-beb1-059047be1cde {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1094.125536] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ea2f571a-6a42-4bae-973a-0667adbbb118 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.135830] env[62522]: DEBUG nova.compute.manager [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1094.136051] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1094.137168] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf18101-86e0-4209-9006-7b6dd9e3fd9e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.146471] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1094.148752] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff351dae-271e-4a98-8866-8b43320f1264 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.160175] env[62522]: DEBUG oslo_vmware.api [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1094.160175] env[62522]: value = "task-2416153" [ 1094.160175] env[62522]: _type = "Task" [ 1094.160175] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.174384] env[62522]: DEBUG oslo_vmware.api [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416153, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.307585] env[62522]: DEBUG nova.compute.utils [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1094.309738] env[62522]: DEBUG nova.compute.manager [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1094.309738] env[62522]: DEBUG nova.network.neutron [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1094.380178] env[62522]: DEBUG nova.policy [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab5e5a8e6ee64aad8d52342ee3f5af36', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4bdd1f5caf09454d808bcdc15df2d3a7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1094.480145] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416152, 'name': CreateVM_Task, 'duration_secs': 0.351817} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.480556] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1094.481965] env[62522]: DEBUG oslo_concurrency.lockutils [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1094.482358] env[62522]: DEBUG oslo_concurrency.lockutils [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.482911] env[62522]: DEBUG oslo_concurrency.lockutils [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1094.483259] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-379f4bba-c3e3-4946-a104-ac1534e6299f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.489273] env[62522]: DEBUG oslo_vmware.api [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1094.489273] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52076eac-9770-c8d0-ec2f-69abac588bbd" [ 1094.489273] env[62522]: _type = "Task" [ 1094.489273] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.499989] env[62522]: DEBUG oslo_vmware.api [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52076eac-9770-c8d0-ec2f-69abac588bbd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.675298] env[62522]: DEBUG oslo_vmware.api [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416153, 'name': PowerOffVM_Task, 'duration_secs': 0.201138} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.679229] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1094.679581] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1094.683938] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a3fc9749-4a28-417b-88c7-56e9924df39b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.762340] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1094.762694] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1094.762907] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Deleting the datastore file [datastore2] 3c4c395c-0625-4569-990d-e2d4ad162c14 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1094.763264] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b7c4c10b-b687-430a-89d0-879f82625361 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.775106] env[62522]: DEBUG oslo_vmware.api [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1094.775106] env[62522]: value = "task-2416156" [ 1094.775106] env[62522]: _type = "Task" [ 1094.775106] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.784617] env[62522]: DEBUG oslo_vmware.api [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416156, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.812996] env[62522]: DEBUG nova.compute.manager [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1094.819662] env[62522]: DEBUG nova.network.neutron [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Successfully created port: 55c5c37a-1605-4edb-957e-04160d41ff01 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1094.850519] env[62522]: DEBUG nova.network.neutron [req-5f0bb6da-d105-4de5-9da0-17896284f560 req-569dd12b-227e-4bce-8c0a-f36f9c273257 service nova] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Updated VIF entry in instance network info cache for port e4e04bfe-9037-47b9-beb1-059047be1cde. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1094.850940] env[62522]: DEBUG nova.network.neutron [req-5f0bb6da-d105-4de5-9da0-17896284f560 req-569dd12b-227e-4bce-8c0a-f36f9c273257 service nova] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Updating instance_info_cache with network_info: [{"id": "e4e04bfe-9037-47b9-beb1-059047be1cde", "address": "fa:16:3e:c8:ba:dd", "network": {"id": "21d0ca22-5509-4f9b-b167-f9fde2dac808", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-547933771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a06421250694a98b13ff34ad816dc75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4e04bfe-90", "ovs_interfaceid": "e4e04bfe-9037-47b9-beb1-059047be1cde", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.009065] env[62522]: DEBUG oslo_vmware.api [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52076eac-9770-c8d0-ec2f-69abac588bbd, 'name': SearchDatastore_Task, 'duration_secs': 0.011546} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.009840] env[62522]: DEBUG oslo_concurrency.lockutils [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1095.010521] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1095.011100] env[62522]: DEBUG oslo_concurrency.lockutils [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1095.011293] env[62522]: DEBUG oslo_concurrency.lockutils [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.011639] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1095.014862] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-db662a95-a6e0-41b9-af8b-4640bb47e004 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.023750] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1095.023942] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1095.024690] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-748c8d7e-8164-42a0-b3aa-adbe9e43eb84 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.031825] env[62522]: DEBUG oslo_vmware.api [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1095.031825] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526867ba-2c87-07fb-358d-b9a4c9824738" [ 1095.031825] env[62522]: _type = "Task" [ 1095.031825] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.049835] env[62522]: DEBUG oslo_vmware.api [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526867ba-2c87-07fb-358d-b9a4c9824738, 'name': SearchDatastore_Task, 'duration_secs': 0.010291} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.049835] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e46ff74-5196-4951-b881-b70c9e6ee14a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.057269] env[62522]: DEBUG oslo_vmware.api [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1095.057269] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526e8240-da9e-fcd0-eaf1-d24e5f831c93" [ 1095.057269] env[62522]: _type = "Task" [ 1095.057269] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.074304] env[62522]: DEBUG oslo_vmware.api [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526e8240-da9e-fcd0-eaf1-d24e5f831c93, 'name': SearchDatastore_Task, 'duration_secs': 0.010692} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.078045] env[62522]: DEBUG oslo_concurrency.lockutils [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1095.078045] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] e369d9e1-1345-4038-b5f3-f816fe767a72/e369d9e1-1345-4038-b5f3-f816fe767a72.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1095.078045] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a48bdf5-c8b4-4bd8-99f3-4040e3cdab46 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.086099] env[62522]: DEBUG oslo_vmware.api [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1095.086099] env[62522]: value = "task-2416157" [ 1095.086099] env[62522]: _type = "Task" [ 1095.086099] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.095305] env[62522]: DEBUG oslo_vmware.api [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416157, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.097948] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a124da71-a41a-42a7-a895-648ddb690a3b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.105487] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-696b2b1a-9d39-456a-ab74-78a8ed90e25e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.892559] env[62522]: DEBUG oslo_concurrency.lockutils [req-5f0bb6da-d105-4de5-9da0-17896284f560 req-569dd12b-227e-4bce-8c0a-f36f9c273257 service nova] Releasing lock "refresh_cache-e369d9e1-1345-4038-b5f3-f816fe767a72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1095.899944] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-078a0935-0ac6-4864-a6b9-1a77a98277ce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.908905] env[62522]: DEBUG oslo_vmware.api [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416156, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199384} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.914112] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1095.914407] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1095.914666] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1095.914918] env[62522]: INFO nova.compute.manager [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Took 1.78 seconds to destroy the instance on the hypervisor. [ 1095.915249] env[62522]: DEBUG oslo.service.loopingcall [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1095.915586] env[62522]: DEBUG oslo_vmware.api [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416157, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.666257} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.916779] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.917035] env[62522]: DEBUG nova.compute.manager [-] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1095.917183] env[62522]: DEBUG nova.network.neutron [-] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1095.919822] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2637f14-73d3-41a6-9f69-0a0db506fc57 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.924145] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] e369d9e1-1345-4038-b5f3-f816fe767a72/e369d9e1-1345-4038-b5f3-f816fe767a72.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1095.924427] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1095.924739] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e226507-b135-40a0-8cd7-ad50839dd09b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.940349] env[62522]: DEBUG nova.compute.provider_tree [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1095.941066] env[62522]: DEBUG oslo_vmware.api [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1095.941066] env[62522]: value = "task-2416158" [ 1095.941066] env[62522]: _type = "Task" [ 1095.941066] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.952180] env[62522]: DEBUG oslo_vmware.api [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416158, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.323228] env[62522]: DEBUG nova.compute.manager [req-ce8f8fb4-1637-40a9-ab8d-667e6e51ec23 req-77096dc7-05dd-438c-b8c3-634a37722ccc service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Received event network-vif-plugged-55c5c37a-1605-4edb-957e-04160d41ff01 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1096.325255] env[62522]: DEBUG oslo_concurrency.lockutils [req-ce8f8fb4-1637-40a9-ab8d-667e6e51ec23 req-77096dc7-05dd-438c-b8c3-634a37722ccc service nova] Acquiring lock "892926ef-3044-497c-8fc8-30cd298e4311-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1096.325625] env[62522]: DEBUG oslo_concurrency.lockutils [req-ce8f8fb4-1637-40a9-ab8d-667e6e51ec23 req-77096dc7-05dd-438c-b8c3-634a37722ccc service nova] Lock "892926ef-3044-497c-8fc8-30cd298e4311-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1096.326432] env[62522]: DEBUG oslo_concurrency.lockutils [req-ce8f8fb4-1637-40a9-ab8d-667e6e51ec23 req-77096dc7-05dd-438c-b8c3-634a37722ccc service nova] Lock "892926ef-3044-497c-8fc8-30cd298e4311-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.326755] env[62522]: DEBUG nova.compute.manager [req-ce8f8fb4-1637-40a9-ab8d-667e6e51ec23 req-77096dc7-05dd-438c-b8c3-634a37722ccc service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] No waiting events found dispatching network-vif-plugged-55c5c37a-1605-4edb-957e-04160d41ff01 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1096.327094] env[62522]: WARNING nova.compute.manager [req-ce8f8fb4-1637-40a9-ab8d-667e6e51ec23 req-77096dc7-05dd-438c-b8c3-634a37722ccc service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Received unexpected event network-vif-plugged-55c5c37a-1605-4edb-957e-04160d41ff01 for instance with vm_state building and task_state spawning. [ 1096.396105] env[62522]: DEBUG nova.compute.manager [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1096.420029] env[62522]: DEBUG nova.network.neutron [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Successfully updated port: 55c5c37a-1605-4edb-957e-04160d41ff01 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1096.430193] env[62522]: DEBUG nova.virt.hardware [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1096.432410] env[62522]: DEBUG nova.virt.hardware [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1096.432513] env[62522]: DEBUG nova.virt.hardware [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1096.432651] env[62522]: DEBUG nova.virt.hardware [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1096.432799] env[62522]: DEBUG nova.virt.hardware [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1096.432944] env[62522]: DEBUG nova.virt.hardware [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1096.433540] env[62522]: DEBUG nova.virt.hardware [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1096.433790] env[62522]: DEBUG nova.virt.hardware [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1096.434034] env[62522]: DEBUG nova.virt.hardware [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1096.434216] env[62522]: DEBUG nova.virt.hardware [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1096.435815] env[62522]: DEBUG nova.virt.hardware [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1096.436976] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f5b3d64-e17e-410b-891e-cd0e6be4f974 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.444673] env[62522]: DEBUG nova.scheduler.client.report [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1096.458852] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ffc7bb-2c37-46e0-a66e-752b868b43a9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.478425] env[62522]: DEBUG oslo_vmware.api [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416158, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07118} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.478828] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1096.480217] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08752ab2-e447-4386-bbf2-3ad1d3584569 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.507565] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] e369d9e1-1345-4038-b5f3-f816fe767a72/e369d9e1-1345-4038-b5f3-f816fe767a72.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1096.507565] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07a81c1d-6eab-4e78-a87f-4e6031bd2190 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.523532] env[62522]: DEBUG nova.compute.manager [req-505e7cd2-5f4a-4317-90b4-b01baaa19cb5 req-eb516c66-db41-4209-bb52-68dcfffeeba9 service nova] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Received event network-vif-deleted-1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1096.523815] env[62522]: INFO nova.compute.manager [req-505e7cd2-5f4a-4317-90b4-b01baaa19cb5 req-eb516c66-db41-4209-bb52-68dcfffeeba9 service nova] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Neutron deleted interface 1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f; detaching it from the instance and deleting it from the info cache [ 1096.524045] env[62522]: DEBUG nova.network.neutron [req-505e7cd2-5f4a-4317-90b4-b01baaa19cb5 req-eb516c66-db41-4209-bb52-68dcfffeeba9 service nova] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1096.533249] env[62522]: DEBUG oslo_vmware.api [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1096.533249] env[62522]: value = "task-2416159" [ 1096.533249] env[62522]: _type = "Task" [ 1096.533249] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.542799] env[62522]: DEBUG oslo_vmware.api [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416159, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.926316] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1096.926316] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.926316] env[62522]: DEBUG nova.network.neutron [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1097.087697] env[62522]: DEBUG oslo_concurrency.lockutils [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.148s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1097.087697] env[62522]: DEBUG nova.compute.manager [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1097.087697] env[62522]: DEBUG oslo_concurrency.lockutils [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.640s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1097.087697] env[62522]: INFO nova.compute.claims [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1097.087697] env[62522]: DEBUG nova.network.neutron [-] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.087697] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d7b21e5b-456f-44d1-88c2-09e76893df74 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.087697] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf37bb3-c750-4fa6-9561-c085114e639f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.087697] env[62522]: DEBUG oslo_vmware.api [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416159, 'name': ReconfigVM_Task, 'duration_secs': 0.415217} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.087697] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Reconfigured VM instance instance-0000005d to attach disk [datastore2] e369d9e1-1345-4038-b5f3-f816fe767a72/e369d9e1-1345-4038-b5f3-f816fe767a72.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1097.087697] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-671b2f3b-e4f6-42aa-a9e0-e8040fc14fa2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.087697] env[62522]: DEBUG oslo_vmware.api [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1097.087697] env[62522]: value = "task-2416160" [ 1097.087697] env[62522]: _type = "Task" [ 1097.087697] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.087697] env[62522]: DEBUG nova.compute.manager [req-505e7cd2-5f4a-4317-90b4-b01baaa19cb5 req-eb516c66-db41-4209-bb52-68dcfffeeba9 service nova] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Detach interface failed, port_id=1b7d6d1b-0d3c-47b3-8d93-c8cdc0b1e00f, reason: Instance 3c4c395c-0625-4569-990d-e2d4ad162c14 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1097.087697] env[62522]: DEBUG oslo_vmware.api [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416160, 'name': Rename_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.460575] env[62522]: DEBUG nova.network.neutron [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1097.584670] env[62522]: DEBUG oslo_vmware.api [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416160, 'name': Rename_Task, 'duration_secs': 0.193668} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.585014] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1097.585274] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9fd93733-c10a-47f6-8471-93697df38cd0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.593709] env[62522]: DEBUG nova.compute.utils [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1097.593880] env[62522]: INFO nova.compute.manager [-] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Took 1.68 seconds to deallocate network for instance. [ 1097.595635] env[62522]: DEBUG nova.compute.manager [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1097.595795] env[62522]: DEBUG nova.network.neutron [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1097.599181] env[62522]: DEBUG oslo_vmware.api [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1097.599181] env[62522]: value = "task-2416161" [ 1097.599181] env[62522]: _type = "Task" [ 1097.599181] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.622223] env[62522]: DEBUG oslo_vmware.api [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416161, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.663160] env[62522]: DEBUG nova.policy [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9694ee575d094ccf845eb57acf3e70c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '00b27498c07344d1bf9cecefa0fca033', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1097.671900] env[62522]: DEBUG nova.network.neutron [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Updating instance_info_cache with network_info: [{"id": "55c5c37a-1605-4edb-957e-04160d41ff01", "address": "fa:16:3e:07:85:b9", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c5c37a-16", "ovs_interfaceid": "55c5c37a-1605-4edb-957e-04160d41ff01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} 
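Annotation: the records above repeatedly start a vCenter operation (PowerOffVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) and then poll it, producing the "Task: {'id': ..., 'name': ...} progress is N%" lines. Below is a minimal sketch of that oslo.vmware call-then-wait pattern, assuming an already established VMwareAPISession; the helper name power_off_vm and the vm_ref argument are illustrative, not Nova's actual code.

```python
# Minimal sketch of the oslo.vmware task-wait pattern seen in this log.
# Assumes an existing, authenticated VMwareAPISession; `vm_ref` is a
# VirtualMachine managed-object reference obtained elsewhere.
from oslo_vmware import api


def power_off_vm(session: api.VMwareAPISession, vm_ref):
    # The vSphere call returns immediately with a Task managed object
    # (logged here as "Invoking VirtualMachine.PowerOffVM_Task ...").
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # wait_for_task polls the task until it reaches 'success' (or raises
    # on 'error'), emitting the periodic "progress is N%" DEBUG records.
    return session.wait_for_task(task)
```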
[ 1097.986137] env[62522]: DEBUG nova.network.neutron [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Successfully created port: 68620e13-f57f-4573-a1d2-4092e26de22d {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1098.095228] env[62522]: DEBUG nova.compute.manager [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1098.105301] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.118030] env[62522]: DEBUG oslo_vmware.api [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416161, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.175176] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1098.175886] env[62522]: DEBUG nova.compute.manager [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Instance network_info: |[{"id": "55c5c37a-1605-4edb-957e-04160d41ff01", "address": "fa:16:3e:07:85:b9", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c5c37a-16", "ovs_interfaceid": "55c5c37a-1605-4edb-957e-04160d41ff01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1098.176394] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 
tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:85:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee555dfd-3d1a-4220-89cd-ffba64e4acf0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '55c5c37a-1605-4edb-957e-04160d41ff01', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1098.184520] env[62522]: DEBUG oslo.service.loopingcall [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1098.188162] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1098.190153] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-74832814-b601-490e-9548-8ae804343441 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.217608] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1098.217608] env[62522]: value = "task-2416162" [ 1098.217608] env[62522]: _type = "Task" [ 1098.217608] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.229344] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416162, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.347855] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477b57a8-a3a5-4624-8935-b1df392e1812 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.356999] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d2802f-b3aa-4012-872f-79ab955864c2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.394696] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d66206d5-eab9-4ea1-b1a2-0c2558b2dd1d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.405962] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-807b2fdb-a0e8-4433-a737-87edafde96f7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.423477] env[62522]: DEBUG nova.compute.provider_tree [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1098.516092] env[62522]: DEBUG nova.compute.manager [req-a1c978f2-6f1e-4115-aae9-4d871a44347b req-a165d00e-ca0b-4976-aa3f-468fc233894e service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Received event network-changed-55c5c37a-1605-4edb-957e-04160d41ff01 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1098.516361] env[62522]: DEBUG nova.compute.manager [req-a1c978f2-6f1e-4115-aae9-4d871a44347b req-a165d00e-ca0b-4976-aa3f-468fc233894e service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Refreshing instance network info cache due to event network-changed-55c5c37a-1605-4edb-957e-04160d41ff01. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1098.516638] env[62522]: DEBUG oslo_concurrency.lockutils [req-a1c978f2-6f1e-4115-aae9-4d871a44347b req-a165d00e-ca0b-4976-aa3f-468fc233894e service nova] Acquiring lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1098.516826] env[62522]: DEBUG oslo_concurrency.lockutils [req-a1c978f2-6f1e-4115-aae9-4d871a44347b req-a165d00e-ca0b-4976-aa3f-468fc233894e service nova] Acquired lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.517244] env[62522]: DEBUG nova.network.neutron [req-a1c978f2-6f1e-4115-aae9-4d871a44347b req-a165d00e-ca0b-4976-aa3f-468fc233894e service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Refreshing network info cache for port 55c5c37a-1605-4edb-957e-04160d41ff01 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1098.618281] env[62522]: DEBUG oslo_vmware.api [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416161, 'name': PowerOnVM_Task, 'duration_secs': 0.586451} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.618575] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1098.618780] env[62522]: INFO nova.compute.manager [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Took 6.90 seconds to spawn the instance on the hypervisor. [ 1098.618990] env[62522]: DEBUG nova.compute.manager [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1098.619771] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a4d5708-569c-4c27-97c3-f237ddf5f8a5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.728555] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416162, 'name': CreateVM_Task, 'duration_secs': 0.38756} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.729096] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1098.729959] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1098.729959] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.730391] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1098.731429] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-057b1b1d-61c3-446b-8b4a-88e2f4ff3814 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.737147] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1098.737147] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5244af82-2347-8a15-8832-91cb13a28929" [ 1098.737147] env[62522]: _type = "Task" [ 1098.737147] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.746813] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5244af82-2347-8a15-8832-91cb13a28929, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.834274] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Acquiring lock "981a4839-28d0-4d91-88cd-99c1d263ca4d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.834510] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Lock "981a4839-28d0-4d91-88cd-99c1d263ca4d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1098.927520] env[62522]: DEBUG nova.scheduler.client.report [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1099.114155] env[62522]: DEBUG nova.compute.manager [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1099.143187] env[62522]: INFO nova.compute.manager [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Took 16.69 seconds to build instance. 
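Note: the lock messages in this section ("Acquiring lock ... by ...", "acquired ... :: waited Ns", "released ... :: held Ns") are emitted by oslo.concurrency's lockutils module. The following is a minimal sketch of the two usage forms these entries appear to correspond to, assuming the standard oslo_concurrency API; the lock names and function bodies are placeholders, not Nova's actual code:

    from oslo_concurrency import lockutils

    # Decorator form: concurrent calls are serialized under one named lock.
    # This is the form whose inner() wrapper logs the
    # "acquired by ... :: waited Ns" / "released ... :: held Ns" pairs.
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        pass  # placeholder body

    # Context-manager form, matching the per-instance
    # "refresh_cache-<instance-uuid>" Acquiring/Acquired/Releasing messages.
    with lockutils.lock('refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311'):
        pass  # placeholder: refresh the instance network info cache here

Both forms default to in-process semaphores; passing external=True switches to file-based locks that are also honoured across processes.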
[ 1099.149699] env[62522]: DEBUG nova.virt.hardware [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1099.149966] env[62522]: DEBUG nova.virt.hardware [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1099.150158] env[62522]: DEBUG nova.virt.hardware [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1099.150361] env[62522]: DEBUG nova.virt.hardware [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1099.150508] env[62522]: DEBUG nova.virt.hardware [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1099.150674] env[62522]: DEBUG nova.virt.hardware [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1099.150880] env[62522]: DEBUG nova.virt.hardware [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1099.151046] env[62522]: DEBUG nova.virt.hardware [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1099.151264] env[62522]: DEBUG nova.virt.hardware [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:505}} [ 1099.151441] env[62522]: DEBUG nova.virt.hardware [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1099.151611] env[62522]: DEBUG nova.virt.hardware [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1099.152533] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe38a3c-7548-4111-8ac1-960b1e0f4905 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.166314] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7910661-258b-4a4c-8d00-402e28966143 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.249651] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5244af82-2347-8a15-8832-91cb13a28929, 'name': SearchDatastore_Task, 'duration_secs': 0.013116} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.250079] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1099.250424] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1099.250676] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1099.250825] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.251013] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1099.251290] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-76b712dd-6afe-4839-bc46-619752f67117 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.261677] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1099.261863] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1099.264881] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bace1684-67d8-4fd9-bfff-a38c51159a74 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.270717] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1099.270717] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524d56ae-9dac-e278-ab57-34514ec44e85" [ 1099.270717] env[62522]: _type = "Task" [ 1099.270717] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.280076] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524d56ae-9dac-e278-ab57-34514ec44e85, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.302792] env[62522]: DEBUG nova.network.neutron [req-a1c978f2-6f1e-4115-aae9-4d871a44347b req-a165d00e-ca0b-4976-aa3f-468fc233894e service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Updated VIF entry in instance network info cache for port 55c5c37a-1605-4edb-957e-04160d41ff01. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1099.303253] env[62522]: DEBUG nova.network.neutron [req-a1c978f2-6f1e-4115-aae9-4d871a44347b req-a165d00e-ca0b-4976-aa3f-468fc233894e service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Updating instance_info_cache with network_info: [{"id": "55c5c37a-1605-4edb-957e-04160d41ff01", "address": "fa:16:3e:07:85:b9", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c5c37a-16", "ovs_interfaceid": "55c5c37a-1605-4edb-957e-04160d41ff01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.336437] env[62522]: DEBUG nova.compute.manager [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1099.432868] env[62522]: DEBUG oslo_concurrency.lockutils [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.476s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1099.433433] env[62522]: DEBUG nova.compute.manager [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1099.437021] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.197s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1099.437118] env[62522]: DEBUG nova.objects.instance [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lazy-loading 'resources' on Instance uuid cd69a052-369b-4809-baf0-a1aec44f4ab5 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1099.646323] env[62522]: DEBUG oslo_concurrency.lockutils [None req-962d6d3c-dbcd-4e36-ac09-b48c7da1d18d tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "e369d9e1-1345-4038-b5f3-f816fe767a72" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.202s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1099.762144] env[62522]: DEBUG nova.network.neutron [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Successfully updated port: 68620e13-f57f-4573-a1d2-4092e26de22d {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1099.783837] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524d56ae-9dac-e278-ab57-34514ec44e85, 'name': SearchDatastore_Task, 'duration_secs': 0.012514} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.784741] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c67b038c-79ad-4dd7-9f1f-6f096c826996 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.791959] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1099.791959] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52844739-5f6f-f5c8-e119-6aab1873a1b2" [ 1099.791959] env[62522]: _type = "Task" [ 1099.791959] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.801015] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52844739-5f6f-f5c8-e119-6aab1873a1b2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.806695] env[62522]: DEBUG oslo_concurrency.lockutils [req-a1c978f2-6f1e-4115-aae9-4d871a44347b req-a165d00e-ca0b-4976-aa3f-468fc233894e service nova] Releasing lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1099.863985] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.943035] env[62522]: DEBUG nova.compute.utils [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1099.947699] env[62522]: DEBUG nova.compute.manager [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Not allocating networking since 'none' was specified. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1100.120019] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25dec34f-c4a0-40fd-b0e5-007a5951dbea {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.129251] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16f60acf-94e4-4785-86ea-b234ad399ad1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.161153] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49d53c62-455b-4c59-8e4e-a24f726b8ccd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.169901] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68f14e84-3bcc-4c96-9235-886db6d77966 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.184366] env[62522]: DEBUG nova.compute.provider_tree [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1100.191419] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "e369d9e1-1345-4038-b5f3-f816fe767a72" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.191419] env[62522]: DEBUG oslo_concurrency.lockutils [None 
req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "e369d9e1-1345-4038-b5f3-f816fe767a72" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1100.191419] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "e369d9e1-1345-4038-b5f3-f816fe767a72-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.191419] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "e369d9e1-1345-4038-b5f3-f816fe767a72-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1100.191419] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "e369d9e1-1345-4038-b5f3-f816fe767a72-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1100.193804] env[62522]: INFO nova.compute.manager [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Terminating instance [ 1100.265227] env[62522]: DEBUG oslo_concurrency.lockutils [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "refresh_cache-5426087f-3dd0-4796-aa46-6020a3bda4f5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1100.266036] env[62522]: DEBUG oslo_concurrency.lockutils [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired lock "refresh_cache-5426087f-3dd0-4796-aa46-6020a3bda4f5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.266036] env[62522]: DEBUG nova.network.neutron [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1100.303449] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52844739-5f6f-f5c8-e119-6aab1873a1b2, 'name': SearchDatastore_Task, 'duration_secs': 0.018117} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.303728] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1100.303987] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 892926ef-3044-497c-8fc8-30cd298e4311/892926ef-3044-497c-8fc8-30cd298e4311.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1100.304275] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1e49a3c3-1eff-4929-8297-5015261114ac {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.313216] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1100.313216] env[62522]: value = "task-2416163" [ 1100.313216] env[62522]: _type = "Task" [ 1100.313216] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.322608] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416163, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.450190] env[62522]: DEBUG nova.compute.manager [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1100.548730] env[62522]: DEBUG nova.compute.manager [req-7f7d70b8-98e8-4d8b-9bcb-fc15d23e4e41 req-4814c446-3aed-47bf-b546-34c07bd5ef6c service nova] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Received event network-vif-plugged-68620e13-f57f-4573-a1d2-4092e26de22d {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1100.549014] env[62522]: DEBUG oslo_concurrency.lockutils [req-7f7d70b8-98e8-4d8b-9bcb-fc15d23e4e41 req-4814c446-3aed-47bf-b546-34c07bd5ef6c service nova] Acquiring lock "5426087f-3dd0-4796-aa46-6020a3bda4f5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.549206] env[62522]: DEBUG oslo_concurrency.lockutils [req-7f7d70b8-98e8-4d8b-9bcb-fc15d23e4e41 req-4814c446-3aed-47bf-b546-34c07bd5ef6c service nova] Lock "5426087f-3dd0-4796-aa46-6020a3bda4f5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1100.549338] env[62522]: DEBUG oslo_concurrency.lockutils [req-7f7d70b8-98e8-4d8b-9bcb-fc15d23e4e41 req-4814c446-3aed-47bf-b546-34c07bd5ef6c service nova] Lock "5426087f-3dd0-4796-aa46-6020a3bda4f5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1100.549504] env[62522]: DEBUG nova.compute.manager [req-7f7d70b8-98e8-4d8b-9bcb-fc15d23e4e41 req-4814c446-3aed-47bf-b546-34c07bd5ef6c service nova] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] No waiting events found dispatching network-vif-plugged-68620e13-f57f-4573-a1d2-4092e26de22d {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1100.549669] env[62522]: WARNING nova.compute.manager [req-7f7d70b8-98e8-4d8b-9bcb-fc15d23e4e41 req-4814c446-3aed-47bf-b546-34c07bd5ef6c service nova] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Received unexpected event network-vif-plugged-68620e13-f57f-4573-a1d2-4092e26de22d for instance with vm_state building and task_state spawning. [ 1100.549810] env[62522]: DEBUG nova.compute.manager [req-7f7d70b8-98e8-4d8b-9bcb-fc15d23e4e41 req-4814c446-3aed-47bf-b546-34c07bd5ef6c service nova] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Received event network-changed-68620e13-f57f-4573-a1d2-4092e26de22d {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1100.549962] env[62522]: DEBUG nova.compute.manager [req-7f7d70b8-98e8-4d8b-9bcb-fc15d23e4e41 req-4814c446-3aed-47bf-b546-34c07bd5ef6c service nova] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Refreshing instance network info cache due to event network-changed-68620e13-f57f-4573-a1d2-4092e26de22d. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1100.550158] env[62522]: DEBUG oslo_concurrency.lockutils [req-7f7d70b8-98e8-4d8b-9bcb-fc15d23e4e41 req-4814c446-3aed-47bf-b546-34c07bd5ef6c service nova] Acquiring lock "refresh_cache-5426087f-3dd0-4796-aa46-6020a3bda4f5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1100.688124] env[62522]: DEBUG nova.scheduler.client.report [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1100.697605] env[62522]: DEBUG nova.compute.manager [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1100.697698] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1100.698724] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b766bab4-4ae2-4c18-b4bb-39e31bc2cfd5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.711429] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1100.713035] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e62f8e0b-291c-4e0b-a752-12f2b772f5e4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.727842] env[62522]: DEBUG oslo_vmware.api [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1100.727842] env[62522]: value = "task-2416164" [ 1100.727842] env[62522]: _type = "Task" [ 1100.727842] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.741927] env[62522]: DEBUG oslo_vmware.api [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416164, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.801628] env[62522]: DEBUG nova.network.neutron [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1100.831659] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416163, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.989701] env[62522]: DEBUG nova.network.neutron [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Updating instance_info_cache with network_info: [{"id": "68620e13-f57f-4573-a1d2-4092e26de22d", "address": "fa:16:3e:87:e1:e1", "network": {"id": "2037da36-cd13-4cb1-95f4-08d1e174336c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1895994075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00b27498c07344d1bf9cecefa0fca033", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68620e13-f5", "ovs_interfaceid": "68620e13-f57f-4573-a1d2-4092e26de22d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1101.194098] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.757s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.196386] env[62522]: DEBUG oslo_concurrency.lockutils [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.407s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1101.196958] env[62522]: DEBUG nova.objects.instance [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lazy-loading 'resources' on Instance uuid 9337449d-5aff-4170-83ea-42fe2e9d1657 {{(pid=62522) 
obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1101.218584] env[62522]: INFO nova.scheduler.client.report [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Deleted allocations for instance cd69a052-369b-4809-baf0-a1aec44f4ab5 [ 1101.239228] env[62522]: DEBUG oslo_vmware.api [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416164, 'name': PowerOffVM_Task, 'duration_secs': 0.302725} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.239537] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1101.239720] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1101.240727] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1a288b5e-7039-428e-9858-62fc475d45b0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.329708] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416163, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.623184} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.330911] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 892926ef-3044-497c-8fc8-30cd298e4311/892926ef-3044-497c-8fc8-30cd298e4311.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1101.331229] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1101.331545] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1101.331730] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1101.331894] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Deleting the datastore file [datastore2] e369d9e1-1345-4038-b5f3-f816fe767a72 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1101.332184] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-990eaf66-3b74-4f75-b501-18681c3ffb3c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.334353] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-41c354ac-fc1c-4ccd-992e-3c0cf69364b5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.342562] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1101.342562] env[62522]: value = "task-2416167" [ 1101.342562] env[62522]: _type = "Task" [ 1101.342562] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.344124] env[62522]: DEBUG oslo_vmware.api [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1101.344124] env[62522]: value = "task-2416166" [ 1101.344124] env[62522]: _type = "Task" [ 1101.344124] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.359914] env[62522]: DEBUG oslo_vmware.api [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416166, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.360195] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416167, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.463511] env[62522]: DEBUG nova.compute.manager [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1101.493355] env[62522]: DEBUG nova.virt.hardware [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1101.493652] env[62522]: DEBUG nova.virt.hardware [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1101.493829] env[62522]: DEBUG nova.virt.hardware [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1101.494081] env[62522]: DEBUG nova.virt.hardware [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1101.494271] env[62522]: DEBUG nova.virt.hardware [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1101.494481] env[62522]: DEBUG 
nova.virt.hardware [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1101.494774] env[62522]: DEBUG nova.virt.hardware [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1101.494985] env[62522]: DEBUG nova.virt.hardware [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1101.495217] env[62522]: DEBUG nova.virt.hardware [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1101.495458] env[62522]: DEBUG nova.virt.hardware [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1101.495702] env[62522]: DEBUG nova.virt.hardware [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1101.496316] env[62522]: DEBUG oslo_concurrency.lockutils [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Releasing lock "refresh_cache-5426087f-3dd0-4796-aa46-6020a3bda4f5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1101.496704] env[62522]: DEBUG nova.compute.manager [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Instance network_info: |[{"id": "68620e13-f57f-4573-a1d2-4092e26de22d", "address": "fa:16:3e:87:e1:e1", "network": {"id": "2037da36-cd13-4cb1-95f4-08d1e174336c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1895994075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00b27498c07344d1bf9cecefa0fca033", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": 
"nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68620e13-f5", "ovs_interfaceid": "68620e13-f57f-4573-a1d2-4092e26de22d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1101.497761] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c143465f-4c9f-477c-bd9e-61f38512d931 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.501165] env[62522]: DEBUG oslo_concurrency.lockutils [req-7f7d70b8-98e8-4d8b-9bcb-fc15d23e4e41 req-4814c446-3aed-47bf-b546-34c07bd5ef6c service nova] Acquired lock "refresh_cache-5426087f-3dd0-4796-aa46-6020a3bda4f5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.501407] env[62522]: DEBUG nova.network.neutron [req-7f7d70b8-98e8-4d8b-9bcb-fc15d23e4e41 req-4814c446-3aed-47bf-b546-34c07bd5ef6c service nova] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Refreshing network info cache for port 68620e13-f57f-4573-a1d2-4092e26de22d {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1101.504206] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:e1:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f65996a3-f865-4492-9377-cd14ec8b3aae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '68620e13-f57f-4573-a1d2-4092e26de22d', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1101.513480] env[62522]: DEBUG oslo.service.loopingcall [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1101.514654] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1101.515592] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0986cbd5-02fe-4213-9221-902366dfa734 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.537043] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a468f402-e5a3-46af-8b86-55a94c3c9f7d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.542879] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1101.542879] env[62522]: value = "task-2416168" [ 1101.542879] env[62522]: _type = "Task" [ 1101.542879] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.558750] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Instance VIF info [] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1101.564750] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Creating folder: Project (0eb4d8cecd784c6c9a602c88c8bae244). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1101.568335] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a5828140-11e0-4770-9736-2df50c30389e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.570545] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416168, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.584381] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Created folder: Project (0eb4d8cecd784c6c9a602c88c8bae244) in parent group-v489562. [ 1101.584625] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Creating folder: Instances. Parent ref: group-v489827. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1101.584910] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-25184aa5-b1bb-48b0-b92f-b6d0bdbca0b9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.599691] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Created folder: Instances in parent group-v489827. [ 1101.600072] env[62522]: DEBUG oslo.service.loopingcall [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1101.600316] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1101.600564] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c35cfe16-ec95-414b-90c5-8f536a923628 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.619556] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1101.619556] env[62522]: value = "task-2416171" [ 1101.619556] env[62522]: _type = "Task" [ 1101.619556] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.629023] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416171, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.729174] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8d4c2766-353b-4f3c-8d92-97741197d169 tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "cd69a052-369b-4809-baf0-a1aec44f4ab5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.946s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.858174] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416167, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.328046} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.859216] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1101.860573] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-268a5335-8f93-401b-81ad-b6e6c000e0fa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.867307] env[62522]: DEBUG oslo_vmware.api [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416166, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.389799} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.868049] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1101.868349] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1101.868611] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1101.868888] env[62522]: INFO nova.compute.manager [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1101.869170] env[62522]: DEBUG oslo.service.loopingcall [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1101.872298] env[62522]: DEBUG nova.compute.manager [-] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1101.872442] env[62522]: DEBUG nova.network.neutron [-] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1101.895942] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 892926ef-3044-497c-8fc8-30cd298e4311/892926ef-3044-497c-8fc8-30cd298e4311.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1101.899624] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8df72715-fdb9-4a8f-847e-a21d1606f652 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.932023] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1101.932023] env[62522]: value = "task-2416172" [ 1101.932023] env[62522]: _type = "Task" [ 1101.932023] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.933686] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d19c91a-1f19-45be-a59a-caed218c03ca {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.952056] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5be61ff-5eba-4dab-803f-ce80f7f0d0a7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.956580] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416172, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.014873] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9508e6c9-db7d-4a69-b581-87f25bcdda1d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.032824] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e117f6d0-1d2e-47dc-b3f5-6dd502d7462f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.050052] env[62522]: DEBUG nova.compute.provider_tree [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1102.064132] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416168, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.131130] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416171, 'name': CreateVM_Task, 'duration_secs': 0.443365} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.131316] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1102.132128] env[62522]: DEBUG oslo_concurrency.lockutils [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1102.132128] env[62522]: DEBUG oslo_concurrency.lockutils [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.132264] env[62522]: DEBUG oslo_concurrency.lockutils [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1102.132835] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cfe0e7fa-b119-48ee-9146-f7ccc76f0843 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.138883] env[62522]: DEBUG oslo_vmware.api [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Waiting for the task: (returnval){ [ 1102.138883] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527d721c-b567-32a6-3252-e451634f147a" [ 1102.138883] env[62522]: _type = "Task" [ 1102.138883] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.148347] env[62522]: DEBUG oslo_vmware.api [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527d721c-b567-32a6-3252-e451634f147a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.423937] env[62522]: DEBUG nova.network.neutron [req-7f7d70b8-98e8-4d8b-9bcb-fc15d23e4e41 req-4814c446-3aed-47bf-b546-34c07bd5ef6c service nova] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Updated VIF entry in instance network info cache for port 68620e13-f57f-4573-a1d2-4092e26de22d. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1102.424409] env[62522]: DEBUG nova.network.neutron [req-7f7d70b8-98e8-4d8b-9bcb-fc15d23e4e41 req-4814c446-3aed-47bf-b546-34c07bd5ef6c service nova] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Updating instance_info_cache with network_info: [{"id": "68620e13-f57f-4573-a1d2-4092e26de22d", "address": "fa:16:3e:87:e1:e1", "network": {"id": "2037da36-cd13-4cb1-95f4-08d1e174336c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1895994075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00b27498c07344d1bf9cecefa0fca033", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68620e13-f5", "ovs_interfaceid": "68620e13-f57f-4573-a1d2-4092e26de22d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.425947] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "c181ce48-9fe2-4400-9047-f8b5a7159dd3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1102.426216] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "c181ce48-9fe2-4400-9047-f8b5a7159dd3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1102.426443] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "c181ce48-9fe2-4400-9047-f8b5a7159dd3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1102.427091] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "c181ce48-9fe2-4400-9047-f8b5a7159dd3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1102.427287] env[62522]: DEBUG oslo_concurrency.lockutils [None 
req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "c181ce48-9fe2-4400-9047-f8b5a7159dd3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1102.430107] env[62522]: INFO nova.compute.manager [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Terminating instance [ 1102.444443] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416172, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.556568] env[62522]: DEBUG nova.scheduler.client.report [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1102.563472] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416168, 'name': CreateVM_Task, 'duration_secs': 0.572864} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.564077] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1102.564869] env[62522]: DEBUG oslo_concurrency.lockutils [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1102.565554] env[62522]: DEBUG oslo_concurrency.lockutils [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.565554] env[62522]: DEBUG oslo_concurrency.lockutils [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1102.565668] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15e68add-72eb-4e75-9b68-0b1a479c7172 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.571948] env[62522]: DEBUG oslo_vmware.api [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1102.571948] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52eaebff-aff2-6069-3b63-4fb6860e049f" [ 1102.571948] env[62522]: _type = "Task" [ 1102.571948] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.582339] env[62522]: DEBUG nova.compute.manager [req-a2915363-01c0-4fb5-9889-a9eb4b3d314d req-8204456a-7044-4c1c-b230-7912da99b0e3 service nova] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Received event network-vif-deleted-e4e04bfe-9037-47b9-beb1-059047be1cde {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1102.582563] env[62522]: INFO nova.compute.manager [req-a2915363-01c0-4fb5-9889-a9eb4b3d314d req-8204456a-7044-4c1c-b230-7912da99b0e3 service nova] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Neutron deleted interface e4e04bfe-9037-47b9-beb1-059047be1cde; detaching it from the instance and deleting it from the info cache [ 1102.582750] env[62522]: DEBUG nova.network.neutron [req-a2915363-01c0-4fb5-9889-a9eb4b3d314d req-8204456a-7044-4c1c-b230-7912da99b0e3 service nova] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.587127] env[62522]: DEBUG oslo_vmware.api [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52eaebff-aff2-6069-3b63-4fb6860e049f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.650806] env[62522]: DEBUG oslo_vmware.api [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527d721c-b567-32a6-3252-e451634f147a, 'name': SearchDatastore_Task, 'duration_secs': 0.010721} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.651125] env[62522]: DEBUG oslo_concurrency.lockutils [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1102.651366] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1102.651602] env[62522]: DEBUG oslo_concurrency.lockutils [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1102.651749] env[62522]: DEBUG oslo_concurrency.lockutils [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.651925] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1102.652248] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-04a0e41a-a80c-4fc2-93d9-7292afd699fd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.664249] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1102.664578] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1102.665796] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cddad07-5378-48ad-b328-bdae7aafec97 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.674391] env[62522]: DEBUG oslo_vmware.api [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Waiting for the task: (returnval){ [ 1102.674391] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52cfc2f3-c688-34b7-d41a-81296969b1ea" [ 1102.674391] env[62522]: _type = "Task" [ 1102.674391] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.683956] env[62522]: DEBUG oslo_vmware.api [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52cfc2f3-c688-34b7-d41a-81296969b1ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.749687] env[62522]: DEBUG nova.network.neutron [-] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.930733] env[62522]: DEBUG oslo_concurrency.lockutils [req-7f7d70b8-98e8-4d8b-9bcb-fc15d23e4e41 req-4814c446-3aed-47bf-b546-34c07bd5ef6c service nova] Releasing lock "refresh_cache-5426087f-3dd0-4796-aa46-6020a3bda4f5" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1102.938811] env[62522]: DEBUG nova.compute.manager [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1102.939088] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1102.939971] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb2da43-7488-4c29-8bf6-e2527032710d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.946673] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416172, 'name': ReconfigVM_Task, 'duration_secs': 0.561441} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.947413] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 892926ef-3044-497c-8fc8-30cd298e4311/892926ef-3044-497c-8fc8-30cd298e4311.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1102.948104] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ff928df3-ae5d-4318-9189-0b163523b3e9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.951823] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1102.952456] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0ae5c986-27cb-431f-9821-a08115b354b7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.957724] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1102.957724] env[62522]: value = "task-2416173" [ 1102.957724] env[62522]: _type = "Task" [ 1102.957724] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.959122] env[62522]: DEBUG oslo_vmware.api [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 1102.959122] env[62522]: value = "task-2416174" [ 1102.959122] env[62522]: _type = "Task" [ 1102.959122] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.971925] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416173, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.975210] env[62522]: DEBUG oslo_vmware.api [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2416174, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.065118] env[62522]: DEBUG oslo_concurrency.lockutils [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.869s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.067791] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.699s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1103.068063] env[62522]: DEBUG nova.objects.instance [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Lazy-loading 'resources' on Instance uuid 783d9ae7-67f5-4c54-81a7-6715b762afb3 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1103.084900] env[62522]: DEBUG oslo_vmware.api [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52eaebff-aff2-6069-3b63-4fb6860e049f, 'name': SearchDatastore_Task, 'duration_secs': 0.022997} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.085317] env[62522]: DEBUG oslo_concurrency.lockutils [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1103.085622] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1103.085986] env[62522]: DEBUG oslo_concurrency.lockutils [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1103.086144] env[62522]: DEBUG oslo_concurrency.lockutils [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1103.086345] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 
tempest-ServersTestJSON-990685860-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1103.086691] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95616dbc-34d4-44e9-a33a-e41f9a2f7ab6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.090055] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-72730ad0-273c-4e71-8903-f6f706369ffc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.093639] env[62522]: INFO nova.scheduler.client.report [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Deleted allocations for instance 9337449d-5aff-4170-83ea-42fe2e9d1657 [ 1103.102715] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e346846-2bab-40fc-bebf-7830cdafe978 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.116629] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1103.116629] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1103.117802] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27188670-23e8-46f8-a12b-d7a1a80e52b8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.124307] env[62522]: DEBUG oslo_vmware.api [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1103.124307] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52580c39-7bbb-af2e-debf-f267c2f28032" [ 1103.124307] env[62522]: _type = "Task" [ 1103.124307] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.139992] env[62522]: DEBUG nova.compute.manager [req-a2915363-01c0-4fb5-9889-a9eb4b3d314d req-8204456a-7044-4c1c-b230-7912da99b0e3 service nova] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Detach interface failed, port_id=e4e04bfe-9037-47b9-beb1-059047be1cde, reason: Instance e369d9e1-1345-4038-b5f3-f816fe767a72 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1103.143983] env[62522]: DEBUG oslo_vmware.api [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52580c39-7bbb-af2e-debf-f267c2f28032, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.187468] env[62522]: DEBUG oslo_vmware.api [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52cfc2f3-c688-34b7-d41a-81296969b1ea, 'name': SearchDatastore_Task, 'duration_secs': 0.017292} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.188615] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2dd00199-b6a8-484b-9231-f67fe95404d2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.194836] env[62522]: DEBUG oslo_vmware.api [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Waiting for the task: (returnval){ [ 1103.194836] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529f2ff4-531c-f3c2-f4b4-a4ad1fc76ca1" [ 1103.194836] env[62522]: _type = "Task" [ 1103.194836] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.204720] env[62522]: DEBUG oslo_vmware.api [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529f2ff4-531c-f3c2-f4b4-a4ad1fc76ca1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.252106] env[62522]: INFO nova.compute.manager [-] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Took 1.38 seconds to deallocate network for instance. [ 1103.476984] env[62522]: DEBUG oslo_vmware.api [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2416174, 'name': PowerOffVM_Task, 'duration_secs': 0.239477} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.477299] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416173, 'name': Rename_Task, 'duration_secs': 0.161786} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.477570] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1103.477650] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1103.477903] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1103.478167] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8a2219e8-01a6-462a-a808-37ff3692104a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.479757] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9faa0958-740f-4c40-8495-54d807bf7e80 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.488419] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1103.488419] env[62522]: value = "task-2416175" [ 1103.488419] env[62522]: _type = "Task" [ 1103.488419] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.497785] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416175, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.556174] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1103.556472] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1103.556762] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Deleting the datastore file [datastore2] c181ce48-9fe2-4400-9047-f8b5a7159dd3 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1103.557142] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d3c4f45-5ab4-48c9-9b9c-ea5d9666a280 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.568621] env[62522]: DEBUG oslo_vmware.api [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for the task: (returnval){ [ 1103.568621] env[62522]: value = "task-2416177" [ 1103.568621] env[62522]: _type = "Task" [ 1103.568621] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.582201] env[62522]: DEBUG oslo_vmware.api [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2416177, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.605184] env[62522]: DEBUG oslo_concurrency.lockutils [None req-89769161-e113-4a43-8a07-e608032b1b3c tempest-ServerDiskConfigTestJSON-536235198 tempest-ServerDiskConfigTestJSON-536235198-project-member] Lock "9337449d-5aff-4170-83ea-42fe2e9d1657" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.385s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.637012] env[62522]: DEBUG oslo_vmware.api [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52580c39-7bbb-af2e-debf-f267c2f28032, 'name': SearchDatastore_Task, 'duration_secs': 0.018105} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.641589] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7c16f98-1df9-4f8d-bdea-255dcd2a9be2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.648905] env[62522]: DEBUG oslo_vmware.api [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1103.648905] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526cbb6e-1aac-fce0-0d74-31a13a610aaf" [ 1103.648905] env[62522]: _type = "Task" [ 1103.648905] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.659722] env[62522]: DEBUG oslo_vmware.api [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526cbb6e-1aac-fce0-0d74-31a13a610aaf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.707127] env[62522]: DEBUG oslo_vmware.api [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529f2ff4-531c-f3c2-f4b4-a4ad1fc76ca1, 'name': SearchDatastore_Task, 'duration_secs': 0.017232} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.709790] env[62522]: DEBUG oslo_concurrency.lockutils [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1103.710067] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 92604d35-7e59-45b0-9dce-32e515703936/92604d35-7e59-45b0-9dce-32e515703936.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1103.710523] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-23776b59-4ae0-43de-a43a-1d05dc6f9f6f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.718825] env[62522]: DEBUG oslo_vmware.api [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Waiting for the task: (returnval){ [ 1103.718825] env[62522]: value = "task-2416178" [ 1103.718825] env[62522]: _type = "Task" [ 1103.718825] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.731083] env[62522]: DEBUG oslo_vmware.api [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Task: {'id': task-2416178, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.749590] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f66b641-c9bf-4510-bb1f-f94607a07e31 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.758603] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1103.759679] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8077088a-2d3e-48f5-9495-cf1b4927c07a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.798424] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e842f3b-58bc-4c13-b2db-7eacb5cfb94b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.807702] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8986abc-14a8-40db-bc6f-831387f83d1b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.822603] env[62522]: DEBUG nova.compute.provider_tree [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1104.002304] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416175, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.085260] env[62522]: DEBUG oslo_vmware.api [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Task: {'id': task-2416177, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195512} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.086031] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1104.086188] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1104.086460] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1104.088464] env[62522]: INFO nova.compute.manager [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1104.088464] env[62522]: DEBUG oslo.service.loopingcall [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1104.088464] env[62522]: DEBUG nova.compute.manager [-] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1104.088464] env[62522]: DEBUG nova.network.neutron [-] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1104.129689] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ac7f1d-a176-5526-4baa-18bc3de24bf2/disk-0.vmdk. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1104.130843] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f407b3-d333-4bb3-ba46-44b51bc0e52c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.138880] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ac7f1d-a176-5526-4baa-18bc3de24bf2/disk-0.vmdk is in state: ready. 
{{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1104.139115] env[62522]: ERROR oslo_vmware.rw_handles [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ac7f1d-a176-5526-4baa-18bc3de24bf2/disk-0.vmdk due to incomplete transfer. [ 1104.139380] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-38086663-f411-4a6a-b049-94e084012ba4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.155150] env[62522]: DEBUG oslo_vmware.rw_handles [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ac7f1d-a176-5526-4baa-18bc3de24bf2/disk-0.vmdk. {{(pid=62522) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1104.155321] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Uploaded image 2e900327-5daf-401c-97e2-b519e8d896d0 to the Glance image server {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1104.158940] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Destroying the VM {{(pid=62522) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1104.159878] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9589e0fe-c3d8-4abe-a0c9-c52acf5d4a46 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.169575] env[62522]: DEBUG oslo_vmware.api [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526cbb6e-1aac-fce0-0d74-31a13a610aaf, 'name': SearchDatastore_Task, 'duration_secs': 0.012462} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.171368] env[62522]: DEBUG oslo_concurrency.lockutils [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1104.171665] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 5426087f-3dd0-4796-aa46-6020a3bda4f5/5426087f-3dd0-4796-aa46-6020a3bda4f5.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1104.172219] env[62522]: DEBUG oslo_vmware.api [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1104.172219] env[62522]: value = "task-2416179" [ 1104.172219] env[62522]: _type = "Task" [ 1104.172219] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.172449] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b4067656-bf1c-4add-9211-bc1598297faa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.187430] env[62522]: DEBUG oslo_vmware.api [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416179, 'name': Destroy_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.189514] env[62522]: DEBUG oslo_vmware.api [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1104.189514] env[62522]: value = "task-2416180" [ 1104.189514] env[62522]: _type = "Task" [ 1104.189514] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.199663] env[62522]: DEBUG oslo_vmware.api [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416180, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.233438] env[62522]: DEBUG oslo_vmware.api [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Task: {'id': task-2416178, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513198} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.233738] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 92604d35-7e59-45b0-9dce-32e515703936/92604d35-7e59-45b0-9dce-32e515703936.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1104.233961] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1104.234248] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-84a8f8fe-d118-499d-b3ec-78cdaf78e296 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.242829] env[62522]: DEBUG oslo_vmware.api [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Waiting for the task: (returnval){ [ 1104.242829] env[62522]: value = "task-2416181" [ 1104.242829] env[62522]: _type = "Task" [ 1104.242829] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.254685] env[62522]: DEBUG oslo_vmware.api [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Task: {'id': task-2416181, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.325899] env[62522]: DEBUG nova.scheduler.client.report [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1104.398872] env[62522]: DEBUG nova.compute.manager [req-f56d3b42-6e8d-4cb7-a4b6-9d04d222db46 req-169d0e81-6b4e-49c0-ad7b-d05be96a9c59 service nova] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Received event network-vif-deleted-2e33c70f-036d-459c-a393-f570cbf7089c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1104.400645] env[62522]: INFO nova.compute.manager [req-f56d3b42-6e8d-4cb7-a4b6-9d04d222db46 req-169d0e81-6b4e-49c0-ad7b-d05be96a9c59 service nova] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Neutron deleted interface 2e33c70f-036d-459c-a393-f570cbf7089c; detaching it from the instance and deleting it from the info cache [ 1104.400860] env[62522]: DEBUG nova.network.neutron [req-f56d3b42-6e8d-4cb7-a4b6-9d04d222db46 req-169d0e81-6b4e-49c0-ad7b-d05be96a9c59 service nova] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1104.506694] env[62522]: DEBUG oslo_vmware.api [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416175, 'name': PowerOnVM_Task, 'duration_secs': 0.692207} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.506694] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1104.506694] env[62522]: INFO nova.compute.manager [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Took 8.11 seconds to spawn the instance on the hypervisor. 
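Note: the "Inventory has not changed for provider ... based on inventory data: {...}" records above carry the per-resource-class inventory the resource tracker reports to Placement. A minimal sketch (Python, using only the dict shape shown in those records; the capacity rule (total - reserved) * allocation_ratio is the usual Placement convention, stated here as an assumption) of how to read the effective capacity out of such a record:

    # Inventory dict copied from the "Inventory has not changed ..." records above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # Effective capacity as Placement computes it: (total - reserved) * allocation_ratio.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: capacity={capacity:.0f}, max per allocation={inv['max_unit']}")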
[ 1104.506694] env[62522]: DEBUG nova.compute.manager [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1104.508018] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12dc2de6-763b-4f6b-8b1d-64f4331d7abc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.688288] env[62522]: DEBUG oslo_vmware.api [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416179, 'name': Destroy_Task} progress is 33%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.701812] env[62522]: DEBUG oslo_vmware.api [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416180, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.754385] env[62522]: DEBUG oslo_vmware.api [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Task: {'id': task-2416181, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.135028} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.754683] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1104.755470] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-599eb6df-7627-48d2-9e13-3533e264565a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.775910] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] 92604d35-7e59-45b0-9dce-32e515703936/92604d35-7e59-45b0-9dce-32e515703936.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1104.776266] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f686a233-8d3d-4337-a83c-53dd37018b46 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.799024] env[62522]: DEBUG oslo_vmware.api [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Waiting for the task: (returnval){ [ 1104.799024] env[62522]: value = "task-2416182" [ 1104.799024] env[62522]: _type = "Task" [ 1104.799024] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.807946] env[62522]: DEBUG oslo_vmware.api [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Task: {'id': task-2416182, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.831446] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.764s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1104.833924] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.917s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1104.834172] env[62522]: DEBUG nova.objects.instance [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lazy-loading 'pci_requests' on Instance uuid c28d2907-5b59-4df8-91a8-4ba0f2047d89 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1104.865666] env[62522]: INFO nova.scheduler.client.report [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Deleted allocations for instance 783d9ae7-67f5-4c54-81a7-6715b762afb3 [ 1104.881962] env[62522]: DEBUG nova.network.neutron [-] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1104.905112] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dab7a57b-1003-451f-b480-57cf9528679c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.915958] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc7f0aae-e0ce-4733-a923-8d5c90d5746c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.948281] env[62522]: DEBUG nova.compute.manager [req-f56d3b42-6e8d-4cb7-a4b6-9d04d222db46 req-169d0e81-6b4e-49c0-ad7b-d05be96a9c59 service nova] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Detach interface failed, port_id=2e33c70f-036d-459c-a393-f570cbf7089c, reason: Instance c181ce48-9fe2-4400-9047-f8b5a7159dd3 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1105.043129] env[62522]: INFO nova.compute.manager [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Took 19.70 seconds to build instance. 
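Note: the 'Acquiring lock "compute_resources" ... / acquired ... waited Ns / "released" ... held Ns' records (logged from lockutils.py:402/407/421 by the wrapper named "inner") come from oslo.concurrency's synchronized decorator, while the refresh_cache-* lock records (lockutils.py:310/313/331) come from the plain lock() context manager. A minimal sketch of both patterns; the function bodies and names below are illustrative, not Nova's actual code:

    from oslo_concurrency import lockutils

    # Decorator form: entering logs 'Acquiring lock "compute_resources" ...',
    # then 'acquired ... waited Ns'; returning logs '"released" ... held Ns'.
    @lockutils.synchronized('compute_resources')
    def update_usage(tracker, instance):
        pass  # mutate shared resource-tracker state under the lock

    # Context-manager form, as used for the per-instance refresh_cache-* locks.
    def refresh_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the network info cache while holding the lock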
[ 1105.186346] env[62522]: DEBUG oslo_vmware.api [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416179, 'name': Destroy_Task, 'duration_secs': 0.628204} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.186602] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Destroyed the VM [ 1105.186834] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Deleting Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1105.187100] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f77845f8-e5f6-4567-bad7-6df38ec328fe {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.195838] env[62522]: DEBUG oslo_vmware.api [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1105.195838] env[62522]: value = "task-2416183" [ 1105.195838] env[62522]: _type = "Task" [ 1105.195838] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.202024] env[62522]: DEBUG oslo_vmware.api [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416180, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.543914} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.202594] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 5426087f-3dd0-4796-aa46-6020a3bda4f5/5426087f-3dd0-4796-aa46-6020a3bda4f5.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1105.202836] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1105.203365] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ea93cdf0-f899-4bf7-b069-5ffa4fadd62b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.208681] env[62522]: DEBUG oslo_vmware.api [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416183, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.214364] env[62522]: DEBUG oslo_vmware.api [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1105.214364] env[62522]: value = "task-2416184" [ 1105.214364] env[62522]: _type = "Task" [ 1105.214364] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.223448] env[62522]: DEBUG oslo_vmware.api [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416184, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.310861] env[62522]: DEBUG oslo_vmware.api [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Task: {'id': task-2416182, 'name': ReconfigVM_Task, 'duration_secs': 0.368951} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.311215] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Reconfigured VM instance instance-00000060 to attach disk [datastore2] 92604d35-7e59-45b0-9dce-32e515703936/92604d35-7e59-45b0-9dce-32e515703936.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1105.311889] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8ba23851-2402-4d2a-9c5f-14f32f61b2b3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.320284] env[62522]: DEBUG oslo_vmware.api [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Waiting for the task: (returnval){ [ 1105.320284] env[62522]: value = "task-2416185" [ 1105.320284] env[62522]: _type = "Task" [ 1105.320284] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.330666] env[62522]: DEBUG oslo_vmware.api [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Task: {'id': task-2416185, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.338981] env[62522]: DEBUG nova.objects.instance [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lazy-loading 'numa_topology' on Instance uuid c28d2907-5b59-4df8-91a8-4ba0f2047d89 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1105.374066] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2045a25-cb80-45e4-a866-6979dc4cf804 tempest-ServerPasswordTestJSON-1205211578 tempest-ServerPasswordTestJSON-1205211578-project-member] Lock "783d9ae7-67f5-4c54-81a7-6715b762afb3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.471s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.384427] env[62522]: INFO nova.compute.manager [-] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Took 1.30 seconds to deallocate network for instance. [ 1105.548663] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a715d1be-aba2-4065-aa99-9ddcb9089641 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "892926ef-3044-497c-8fc8-30cd298e4311" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.218s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.708468] env[62522]: DEBUG oslo_vmware.api [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416183, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.724526] env[62522]: DEBUG oslo_vmware.api [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416184, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.135915} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.724868] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1105.725767] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a97074d-462b-4c53-a319-78115734d2a7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.756613] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] 5426087f-3dd0-4796-aa46-6020a3bda4f5/5426087f-3dd0-4796-aa46-6020a3bda4f5.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1105.756952] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99ec0788-8f13-401f-b641-64aa67c1a63f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.781921] env[62522]: DEBUG oslo_vmware.api [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1105.781921] env[62522]: value = "task-2416186" [ 1105.781921] env[62522]: _type = "Task" [ 1105.781921] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.793923] env[62522]: DEBUG oslo_vmware.api [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416186, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.831543] env[62522]: DEBUG oslo_vmware.api [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Task: {'id': task-2416185, 'name': Rename_Task, 'duration_secs': 0.173192} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.831867] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1105.832572] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f32d0121-5118-4e46-8cc7-74bced87eedc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.839616] env[62522]: DEBUG oslo_vmware.api [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Waiting for the task: (returnval){ [ 1105.839616] env[62522]: value = "task-2416187" [ 1105.839616] env[62522]: _type = "Task" [ 1105.839616] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.845486] env[62522]: INFO nova.compute.claims [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1105.853571] env[62522]: DEBUG oslo_vmware.api [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Task: {'id': task-2416187, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.893348] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1106.209298] env[62522]: DEBUG oslo_vmware.api [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416183, 'name': RemoveSnapshot_Task, 'duration_secs': 0.600875} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.209579] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Deleted Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1106.209813] env[62522]: INFO nova.compute.manager [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Took 16.36 seconds to snapshot the instance on the hypervisor. 
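Note: the recurring 'Invoking <Something>_Task ... / Waiting for the task: (returnval){ value = "task-..." } to complete / Task: {...} progress is N% / completed successfully' sequences are oslo.vmware's invoke-then-poll pattern: request_handler issues the SOAP call, then wait_for_task/_poll_task polls the task until it finishes. A minimal sketch, assuming an already-established oslo_vmware.api.VMwareAPISession called `session` and a VirtualMachine reference `vm_ref`:

    # Sketch only: `session` and `vm_ref` are assumed to exist already.
    # invoke_api() produces the "Invoking VirtualMachine.PowerOnVM_Task" records;
    # wait_for_task() produces the "progress is N%" / "completed successfully" ones.
    def power_on(session, vm_ref):
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)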
[ 1106.293981] env[62522]: DEBUG oslo_vmware.api [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416186, 'name': ReconfigVM_Task, 'duration_secs': 0.312028} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.294449] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Reconfigured VM instance instance-0000005f to attach disk [datastore1] 5426087f-3dd0-4796-aa46-6020a3bda4f5/5426087f-3dd0-4796-aa46-6020a3bda4f5.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1106.295493] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6c4976a3-fa8c-4d23-a2f9-43bd2e67e1e0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.305566] env[62522]: DEBUG oslo_vmware.api [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1106.305566] env[62522]: value = "task-2416188" [ 1106.305566] env[62522]: _type = "Task" [ 1106.305566] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.315309] env[62522]: DEBUG oslo_vmware.api [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416188, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.354062] env[62522]: DEBUG oslo_vmware.api [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Task: {'id': task-2416187, 'name': PowerOnVM_Task, 'duration_secs': 0.494103} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.354657] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1106.355016] env[62522]: INFO nova.compute.manager [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Took 4.89 seconds to spawn the instance on the hypervisor. 
[ 1106.355187] env[62522]: DEBUG nova.compute.manager [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1106.355982] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f17aae07-4c13-4b84-b89b-808a68735e71 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.589356] env[62522]: DEBUG nova.compute.manager [req-24bdacd4-025a-470b-a883-291132f40d02 req-759b46a9-237a-459c-abaa-747cd0a636c0 service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Received event network-changed-55c5c37a-1605-4edb-957e-04160d41ff01 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1106.589593] env[62522]: DEBUG nova.compute.manager [req-24bdacd4-025a-470b-a883-291132f40d02 req-759b46a9-237a-459c-abaa-747cd0a636c0 service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Refreshing instance network info cache due to event network-changed-55c5c37a-1605-4edb-957e-04160d41ff01. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1106.590251] env[62522]: DEBUG oslo_concurrency.lockutils [req-24bdacd4-025a-470b-a883-291132f40d02 req-759b46a9-237a-459c-abaa-747cd0a636c0 service nova] Acquiring lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1106.590432] env[62522]: DEBUG oslo_concurrency.lockutils [req-24bdacd4-025a-470b-a883-291132f40d02 req-759b46a9-237a-459c-abaa-747cd0a636c0 service nova] Acquired lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.591633] env[62522]: DEBUG nova.network.neutron [req-24bdacd4-025a-470b-a883-291132f40d02 req-759b46a9-237a-459c-abaa-747cd0a636c0 service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Refreshing network info cache for port 55c5c37a-1605-4edb-957e-04160d41ff01 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1106.768282] env[62522]: DEBUG nova.compute.manager [None req-3850bc78-86f2-4f61-a99f-a799617afee4 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Found 2 images (rotation: 2) {{(pid=62522) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1106.817689] env[62522]: DEBUG oslo_vmware.api [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416188, 'name': Rename_Task, 'duration_secs': 0.165961} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.817976] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1106.818262] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0e85d580-1f85-4d2d-80fc-4bc5ee99a60d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.832866] env[62522]: DEBUG oslo_vmware.api [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1106.832866] env[62522]: value = "task-2416189" [ 1106.832866] env[62522]: _type = "Task" [ 1106.832866] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.844924] env[62522]: DEBUG oslo_vmware.api [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416189, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.879248] env[62522]: INFO nova.compute.manager [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Took 18.59 seconds to build instance. 
[ 1107.041630] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2b8025-e74d-4fa6-8547-ab6762147278 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.050829] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62706ff8-a05e-4944-a246-7d2b3e307416 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.094059] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e17055e-44d8-4087-9525-bb54ce81f79e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.104784] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0933bf0f-33bb-493e-a273-7cb55a26de9c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.120886] env[62522]: DEBUG nova.compute.provider_tree [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1107.222053] env[62522]: DEBUG oslo_concurrency.lockutils [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "93a2505e-814d-4809-90a9-0bc215406efd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.222440] env[62522]: DEBUG oslo_concurrency.lockutils [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "93a2505e-814d-4809-90a9-0bc215406efd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.343928] env[62522]: DEBUG oslo_vmware.api [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416189, 'name': PowerOnVM_Task, 'duration_secs': 0.493351} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.344360] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1107.344768] env[62522]: INFO nova.compute.manager [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Took 8.23 seconds to spawn the instance on the hypervisor. 
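Note: when tracing a run like this, the per-task timings live in the 'duration_secs' field of the "completed successfully" records and in the "Took N seconds to ..." INFO lines. A small, hypothetical helper (the regex is written against the exact record shape visible above, nothing more) for tallying task durations from such a log:

    import re
    from collections import defaultdict

    # Matches e.g. "'name': CopyVirtualDisk_Task, 'duration_secs': 0.513198} completed successfully."
    TASK_RE = re.compile(
        r"'name': (?P<name>\w+_Task), 'duration_secs': (?P<secs>[0-9.]+)\} "
        r"completed successfully")

    def tally_task_durations(lines):
        totals = defaultdict(list)
        for line in lines:
            m = TASK_RE.search(line)
            if m:
                totals[m.group('name')].append(float(m.group('secs')))
        # Per task type: (number of completed tasks, total seconds spent).
        return {name: (len(durs), round(sum(durs), 3)) for name, durs in totals.items()}

    # e.g. tally_task_durations(open('nova-compute.log')) -> counts and total seconds
    # per task type (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, PowerOnVM_Task, ...).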
[ 1107.344908] env[62522]: DEBUG nova.compute.manager [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1107.346307] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4eb5379-abe5-4fab-b063-e4d9c1885d61 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.380173] env[62522]: DEBUG oslo_concurrency.lockutils [None req-adaae0a4-74c7-4d5a-a1fa-c38a80f13a97 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Lock "92604d35-7e59-45b0-9dce-32e515703936" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.104s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1107.490038] env[62522]: DEBUG nova.network.neutron [req-24bdacd4-025a-470b-a883-291132f40d02 req-759b46a9-237a-459c-abaa-747cd0a636c0 service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Updated VIF entry in instance network info cache for port 55c5c37a-1605-4edb-957e-04160d41ff01. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1107.490153] env[62522]: DEBUG nova.network.neutron [req-24bdacd4-025a-470b-a883-291132f40d02 req-759b46a9-237a-459c-abaa-747cd0a636c0 service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Updating instance_info_cache with network_info: [{"id": "55c5c37a-1605-4edb-957e-04160d41ff01", "address": "fa:16:3e:07:85:b9", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c5c37a-16", "ovs_interfaceid": "55c5c37a-1605-4edb-957e-04160d41ff01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.624368] env[62522]: DEBUG nova.scheduler.client.report [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1107.726061] env[62522]: DEBUG nova.compute.manager [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1107.867110] env[62522]: INFO nova.compute.manager [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Took 20.18 seconds to build instance. [ 1107.874839] env[62522]: DEBUG nova.compute.manager [None req-ddef5c0c-d19b-4b80-b4c1-bffa05bfcf08 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1107.874839] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-093c3ef3-fc89-4a4c-9a50-ea253bdaabf0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.981973] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Acquiring lock "92604d35-7e59-45b0-9dce-32e515703936" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.982295] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Lock "92604d35-7e59-45b0-9dce-32e515703936" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.982927] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Acquiring lock "92604d35-7e59-45b0-9dce-32e515703936-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.982927] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Lock "92604d35-7e59-45b0-9dce-32e515703936-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.982927] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Lock 
"92604d35-7e59-45b0-9dce-32e515703936-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1107.985029] env[62522]: INFO nova.compute.manager [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Terminating instance [ 1107.992843] env[62522]: DEBUG oslo_concurrency.lockutils [req-24bdacd4-025a-470b-a883-291132f40d02 req-759b46a9-237a-459c-abaa-747cd0a636c0 service nova] Releasing lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1108.131234] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.297s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1108.135826] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.029s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1108.135826] env[62522]: DEBUG nova.objects.instance [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lazy-loading 'resources' on Instance uuid 3c4c395c-0625-4569-990d-e2d4ad162c14 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1108.193826] env[62522]: INFO nova.network.neutron [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Updating port 931dfe44-9ac3-4df4-a4ea-6c8612389451 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1108.254564] env[62522]: DEBUG oslo_concurrency.lockutils [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1108.369652] env[62522]: DEBUG oslo_concurrency.lockutils [None req-89db74e0-c9d3-4e97-a357-f8e288079a06 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "5426087f-3dd0-4796-aa46-6020a3bda4f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.693s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1108.393019] env[62522]: INFO nova.compute.manager [None req-ddef5c0c-d19b-4b80-b4c1-bffa05bfcf08 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 
92604d35-7e59-45b0-9dce-32e515703936] instance snapshotting [ 1108.393019] env[62522]: DEBUG nova.objects.instance [None req-ddef5c0c-d19b-4b80-b4c1-bffa05bfcf08 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Lazy-loading 'flavor' on Instance uuid 92604d35-7e59-45b0-9dce-32e515703936 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1108.490106] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Acquiring lock "refresh_cache-92604d35-7e59-45b0-9dce-32e515703936" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1108.490302] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Acquired lock "refresh_cache-92604d35-7e59-45b0-9dce-32e515703936" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.490508] env[62522]: DEBUG nova.network.neutron [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1108.642322] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dfbe66fd-8b1c-42c0-9c19-0ecd4b77468f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "7f8a8270-5014-446c-aa42-ea0b4079e5a9" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1108.643177] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dfbe66fd-8b1c-42c0-9c19-0ecd4b77468f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "7f8a8270-5014-446c-aa42-ea0b4079e5a9" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1108.643177] env[62522]: DEBUG nova.compute.manager [None req-dfbe66fd-8b1c-42c0-9c19-0ecd4b77468f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1108.644341] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9652297-05a0-4923-bfad-00e7b1d1c9c0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.654062] env[62522]: DEBUG nova.compute.manager [None req-dfbe66fd-8b1c-42c0-9c19-0ecd4b77468f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62522) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1108.655320] env[62522]: DEBUG 
nova.objects.instance [None req-dfbe66fd-8b1c-42c0-9c19-0ecd4b77468f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lazy-loading 'flavor' on Instance uuid 7f8a8270-5014-446c-aa42-ea0b4079e5a9 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1108.816293] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0415350-0e7f-4e3b-bca3-5000a87bbfb1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.825255] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dbb5d3d-9723-48e5-862b-c71e717936db {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.857238] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f2ed8a-0c8a-41d6-b843-6d189b3d2a0c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.866454] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c9de501-ffd2-46cc-84a3-003f71756eb0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.881370] env[62522]: DEBUG nova.compute.provider_tree [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1108.896638] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-683eb537-8d8a-4815-9fdc-8f44bc6f58a9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.914932] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a7c813-83bd-4518-b44b-666b6d4ae487 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.952913] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2e193f7b-1e70-435c-a101-11d5fefbb532 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "5426087f-3dd0-4796-aa46-6020a3bda4f5" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1108.953210] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2e193f7b-1e70-435c-a101-11d5fefbb532 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "5426087f-3dd0-4796-aa46-6020a3bda4f5" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1108.953392] env[62522]: DEBUG nova.compute.manager [None req-2e193f7b-1e70-435c-a101-11d5fefbb532 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1108.954506] 
env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f663d6f7-b30e-4391-bdd6-274c5435d5bf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.962865] env[62522]: DEBUG nova.compute.manager [None req-2e193f7b-1e70-435c-a101-11d5fefbb532 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62522) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1108.963451] env[62522]: DEBUG nova.objects.instance [None req-2e193f7b-1e70-435c-a101-11d5fefbb532 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lazy-loading 'flavor' on Instance uuid 5426087f-3dd0-4796-aa46-6020a3bda4f5 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1109.063743] env[62522]: DEBUG nova.network.neutron [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1109.144420] env[62522]: DEBUG nova.network.neutron [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.385234] env[62522]: DEBUG nova.scheduler.client.report [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1109.427019] env[62522]: DEBUG nova.compute.manager [None req-ddef5c0c-d19b-4b80-b4c1-bffa05bfcf08 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Instance disappeared during snapshot {{(pid=62522) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 1109.553796] env[62522]: DEBUG nova.compute.manager [None req-ddef5c0c-d19b-4b80-b4c1-bffa05bfcf08 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Found 0 images (rotation: 2) {{(pid=62522) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1109.647776] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Releasing lock "refresh_cache-92604d35-7e59-45b0-9dce-32e515703936" {{(pid=62522) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1109.648344] env[62522]: DEBUG nova.compute.manager [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1109.648641] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1109.649564] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9394bf4-5b55-48f6-b6f7-cb35dc57e048 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.660099] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1109.660389] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-50e228ac-1cc2-46ab-b9ad-d2b21d0e0190 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.666225] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfbe66fd-8b1c-42c0-9c19-0ecd4b77468f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1109.666225] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f2392fa-5c74-44cb-b3fa-05088b80d9d2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.669613] env[62522]: DEBUG oslo_vmware.api [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Waiting for the task: (returnval){ [ 1109.669613] env[62522]: value = "task-2416190" [ 1109.669613] env[62522]: _type = "Task" [ 1109.669613] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.674373] env[62522]: DEBUG oslo_vmware.api [None req-dfbe66fd-8b1c-42c0-9c19-0ecd4b77468f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1109.674373] env[62522]: value = "task-2416191" [ 1109.674373] env[62522]: _type = "Task" [ 1109.674373] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.680468] env[62522]: DEBUG oslo_vmware.api [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Task: {'id': task-2416190, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.686509] env[62522]: DEBUG oslo_vmware.api [None req-dfbe66fd-8b1c-42c0-9c19-0ecd4b77468f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416191, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.891209] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.756s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.896019] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.030s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1109.896019] env[62522]: INFO nova.compute.claims [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1109.917019] env[62522]: INFO nova.scheduler.client.report [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Deleted allocations for instance 3c4c395c-0625-4569-990d-e2d4ad162c14 [ 1109.933838] env[62522]: DEBUG nova.compute.manager [req-821c2ffd-e36b-476f-876c-7f6a4d93f5a4 req-832070de-0159-4906-a548-910885ff97ba service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Received event network-vif-plugged-931dfe44-9ac3-4df4-a4ea-6c8612389451 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1109.933971] env[62522]: DEBUG oslo_concurrency.lockutils [req-821c2ffd-e36b-476f-876c-7f6a4d93f5a4 req-832070de-0159-4906-a548-910885ff97ba service nova] Acquiring lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1109.934589] env[62522]: DEBUG oslo_concurrency.lockutils [req-821c2ffd-e36b-476f-876c-7f6a4d93f5a4 req-832070de-0159-4906-a548-910885ff97ba service nova] Lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1109.934819] env[62522]: DEBUG oslo_concurrency.lockutils [req-821c2ffd-e36b-476f-876c-7f6a4d93f5a4 req-832070de-0159-4906-a548-910885ff97ba service nova] Lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.935134] env[62522]: DEBUG 
nova.compute.manager [req-821c2ffd-e36b-476f-876c-7f6a4d93f5a4 req-832070de-0159-4906-a548-910885ff97ba service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] No waiting events found dispatching network-vif-plugged-931dfe44-9ac3-4df4-a4ea-6c8612389451 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1109.935390] env[62522]: WARNING nova.compute.manager [req-821c2ffd-e36b-476f-876c-7f6a4d93f5a4 req-832070de-0159-4906-a548-910885ff97ba service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Received unexpected event network-vif-plugged-931dfe44-9ac3-4df4-a4ea-6c8612389451 for instance with vm_state shelved_offloaded and task_state spawning. [ 1109.978016] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e193f7b-1e70-435c-a101-11d5fefbb532 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1109.978337] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c669f950-8620-481d-8799-2e565d2675b0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.986861] env[62522]: DEBUG oslo_vmware.api [None req-2e193f7b-1e70-435c-a101-11d5fefbb532 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1109.986861] env[62522]: value = "task-2416192" [ 1109.986861] env[62522]: _type = "Task" [ 1109.986861] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.997422] env[62522]: DEBUG oslo_vmware.api [None req-2e193f7b-1e70-435c-a101-11d5fefbb532 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416192, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.072247] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "refresh_cache-c28d2907-5b59-4df8-91a8-4ba0f2047d89" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1110.072247] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquired lock "refresh_cache-c28d2907-5b59-4df8-91a8-4ba0f2047d89" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.072327] env[62522]: DEBUG nova.network.neutron [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1110.182308] env[62522]: DEBUG oslo_vmware.api [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Task: {'id': task-2416190, 'name': PowerOffVM_Task, 'duration_secs': 0.16375} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.183037] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1110.183942] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1110.183942] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d1396ad-0137-4958-a3a6-6fc1da8bd807 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.189609] env[62522]: DEBUG oslo_vmware.api [None req-dfbe66fd-8b1c-42c0-9c19-0ecd4b77468f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416191, 'name': PowerOffVM_Task, 'duration_secs': 0.269171} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.189609] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfbe66fd-8b1c-42c0-9c19-0ecd4b77468f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1110.189749] env[62522]: DEBUG nova.compute.manager [None req-dfbe66fd-8b1c-42c0-9c19-0ecd4b77468f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1110.190696] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92554e3-43ba-414e-abba-0f14e76e2887 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.220027] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1110.220027] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1110.220027] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Deleting the datastore file [datastore2] 92604d35-7e59-45b0-9dce-32e515703936 {{(pid=62522) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1110.220027] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-02eaa6cc-a9ef-4943-b953-199caa8a86e0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.226871] env[62522]: DEBUG oslo_vmware.api [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Waiting for the task: (returnval){ [ 1110.226871] env[62522]: value = "task-2416194" [ 1110.226871] env[62522]: _type = "Task" [ 1110.226871] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.236423] env[62522]: DEBUG oslo_vmware.api [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Task: {'id': task-2416194, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.426291] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d404d08-cb35-48f8-ad85-c2fc1ef439a7 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "3c4c395c-0625-4569-990d-e2d4ad162c14" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.801s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1110.501209] env[62522]: DEBUG oslo_vmware.api [None req-2e193f7b-1e70-435c-a101-11d5fefbb532 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416192, 'name': PowerOffVM_Task, 'duration_secs': 0.194205} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.501324] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e193f7b-1e70-435c-a101-11d5fefbb532 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1110.505019] env[62522]: DEBUG nova.compute.manager [None req-2e193f7b-1e70-435c-a101-11d5fefbb532 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1110.505019] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0612034d-5f77-4379-a7e3-85ac292d1cf9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.710441] env[62522]: DEBUG oslo_concurrency.lockutils [None req-dfbe66fd-8b1c-42c0-9c19-0ecd4b77468f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "7f8a8270-5014-446c-aa42-ea0b4079e5a9" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.067s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1110.737889] env[62522]: DEBUG oslo_vmware.api [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Task: {'id': task-2416194, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1065} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.738160] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1110.738396] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1110.738539] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1110.738710] env[62522]: INFO nova.compute.manager [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Took 1.09 seconds to destroy the instance on the hypervisor. 
[ 1110.738953] env[62522]: DEBUG oslo.service.loopingcall [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1110.739472] env[62522]: DEBUG nova.compute.manager [-] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1110.739584] env[62522]: DEBUG nova.network.neutron [-] [instance: 92604d35-7e59-45b0-9dce-32e515703936] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1110.760190] env[62522]: DEBUG nova.network.neutron [-] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1110.934114] env[62522]: DEBUG nova.network.neutron [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Updating instance_info_cache with network_info: [{"id": "931dfe44-9ac3-4df4-a4ea-6c8612389451", "address": "fa:16:3e:f5:6d:32", "network": {"id": "949f3536-8a7e-4edf-b6cc-6a264fe5fe83", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1891232839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f93394feaa4f4b61a5d3d670d32ec599", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap931dfe44-9a", "ovs_interfaceid": "931dfe44-9ac3-4df4-a4ea-6c8612389451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.019381] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2e193f7b-1e70-435c-a101-11d5fefbb532 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "5426087f-3dd0-4796-aa46-6020a3bda4f5" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.066s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.094708] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2043776-4ea3-41bd-a7b0-0593be30ebfe {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.106271] env[62522]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-218560bc-c6cb-4f95-aee0-a511589e8bfd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.143229] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac5fb03c-9da3-45bf-a139-45b4b5f8a155 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.152827] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b9a4c16-fa57-4863-9fe1-e7cf0d908b1c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.162573] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "b31195c2-29f4-475c-baa7-fcb4791b7278" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.162809] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "b31195c2-29f4-475c-baa7-fcb4791b7278" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.177177] env[62522]: DEBUG nova.compute.provider_tree [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1111.266203] env[62522]: DEBUG nova.network.neutron [-] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.436838] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Releasing lock "refresh_cache-c28d2907-5b59-4df8-91a8-4ba0f2047d89" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1111.458832] env[62522]: DEBUG nova.virt.hardware [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='41b7595777d0a5affd26abdc33305eaa',container_format='bare',created_at=2025-02-10T12:26:10Z,direct_url=,disk_format='vmdk',id=61bbb676-eb4a-448b-9d8a-abdf1c9af6ab,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1868665153-shelved',owner='f93394feaa4f4b61a5d3d670d32ec599',properties=ImageMetaProps,protected=,size=31670784,status='active',tags=,updated_at=2025-02-10T12:26:26Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1111.459086] env[62522]: DEBUG nova.virt.hardware [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1111.459254] env[62522]: DEBUG nova.virt.hardware [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1111.459438] env[62522]: DEBUG nova.virt.hardware [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1111.459582] env[62522]: DEBUG nova.virt.hardware [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1111.459726] env[62522]: DEBUG nova.virt.hardware [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1111.459928] env[62522]: DEBUG nova.virt.hardware [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1111.460103] env[62522]: DEBUG nova.virt.hardware [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1111.460315] env[62522]: DEBUG nova.virt.hardware [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1111.460487] env[62522]: DEBUG nova.virt.hardware [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1111.460657] env[62522]: DEBUG nova.virt.hardware [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1111.461896] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c072721b-41ed-4652-be62-254cba9666f8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.470468] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41be2c56-da92-400f-911d-7cdb6e67eed0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.484737] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:6d:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '983826cf-6390-4ec6-bf97-30a1060947fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '931dfe44-9ac3-4df4-a4ea-6c8612389451', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1111.492817] env[62522]: DEBUG oslo.service.loopingcall [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1111.492994] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1111.493574] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-34c16333-09c2-443a-ba13-88146a4afdf0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.516040] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1111.516040] env[62522]: value = "task-2416195" [ 1111.516040] env[62522]: _type = "Task" [ 1111.516040] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.525364] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416195, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.680569] env[62522]: DEBUG nova.compute.manager [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1111.684035] env[62522]: DEBUG nova.scheduler.client.report [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1111.769415] env[62522]: INFO nova.compute.manager [-] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Took 1.03 seconds to deallocate network for instance. [ 1111.922382] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "5426087f-3dd0-4796-aa46-6020a3bda4f5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.922659] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "5426087f-3dd0-4796-aa46-6020a3bda4f5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.922878] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "5426087f-3dd0-4796-aa46-6020a3bda4f5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.923082] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "5426087f-3dd0-4796-aa46-6020a3bda4f5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.923260] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "5426087f-3dd0-4796-aa46-6020a3bda4f5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.925454] env[62522]: INFO nova.compute.manager [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Terminating instance [ 1111.932200] env[62522]: DEBUG nova.compute.manager 
[None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Stashing vm_state: stopped {{(pid=62522) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1111.965528] env[62522]: DEBUG nova.compute.manager [req-af39639d-798e-4271-821a-1bf91eb8a45f req-10393f9d-8c34-4efc-9ff0-c789954c2c81 service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Received event network-changed-931dfe44-9ac3-4df4-a4ea-6c8612389451 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1111.965772] env[62522]: DEBUG nova.compute.manager [req-af39639d-798e-4271-821a-1bf91eb8a45f req-10393f9d-8c34-4efc-9ff0-c789954c2c81 service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Refreshing instance network info cache due to event network-changed-931dfe44-9ac3-4df4-a4ea-6c8612389451. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1111.965989] env[62522]: DEBUG oslo_concurrency.lockutils [req-af39639d-798e-4271-821a-1bf91eb8a45f req-10393f9d-8c34-4efc-9ff0-c789954c2c81 service nova] Acquiring lock "refresh_cache-c28d2907-5b59-4df8-91a8-4ba0f2047d89" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1111.966156] env[62522]: DEBUG oslo_concurrency.lockutils [req-af39639d-798e-4271-821a-1bf91eb8a45f req-10393f9d-8c34-4efc-9ff0-c789954c2c81 service nova] Acquired lock "refresh_cache-c28d2907-5b59-4df8-91a8-4ba0f2047d89" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.966557] env[62522]: DEBUG nova.network.neutron [req-af39639d-798e-4271-821a-1bf91eb8a45f req-10393f9d-8c34-4efc-9ff0-c789954c2c81 service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Refreshing network info cache for port 931dfe44-9ac3-4df4-a4ea-6c8612389451 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1112.026863] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416195, 'name': CreateVM_Task, 'duration_secs': 0.365884} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.027054] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1112.027730] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/61bbb676-eb4a-448b-9d8a-abdf1c9af6ab" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1112.027900] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquired lock "[datastore2] devstack-image-cache_base/61bbb676-eb4a-448b-9d8a-abdf1c9af6ab" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.028283] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/61bbb676-eb4a-448b-9d8a-abdf1c9af6ab" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1112.028543] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b3f419d-59df-4bc4-9be0-72a6acfd8a10 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.034032] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1112.034032] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5214dc10-fa27-c501-23bd-10a72703a1f6" [ 1112.034032] env[62522]: _type = "Task" [ 1112.034032] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.042483] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5214dc10-fa27-c501-23bd-10a72703a1f6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.190874] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.297s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1112.191269] env[62522]: DEBUG nova.compute.manager [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1112.195784] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.437s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1112.195987] env[62522]: DEBUG nova.objects.instance [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lazy-loading 'resources' on Instance uuid e369d9e1-1345-4038-b5f3-f816fe767a72 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1112.207484] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1112.278998] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1112.355435] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1112.355662] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1112.433142] env[62522]: DEBUG nova.compute.manager [None 
req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1112.433481] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1112.434845] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4bf3ea-f693-4229-80f6-78cf864c33ac {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.446109] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1112.446364] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-03b30829-5fe8-4a9f-ac42-39364ac3f6c7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.450564] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1112.511106] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1112.511343] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1112.511540] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Deleting the datastore file [datastore1] 5426087f-3dd0-4796-aa46-6020a3bda4f5 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1112.511816] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a7842a52-acee-4e43-b11d-792f1cd9cbc9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.519379] env[62522]: DEBUG oslo_vmware.api [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1112.519379] env[62522]: value = "task-2416197" [ 1112.519379] 
env[62522]: _type = "Task" [ 1112.519379] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.527681] env[62522]: DEBUG oslo_vmware.api [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416197, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.543744] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Releasing lock "[datastore2] devstack-image-cache_base/61bbb676-eb4a-448b-9d8a-abdf1c9af6ab" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1112.543980] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Processing image 61bbb676-eb4a-448b-9d8a-abdf1c9af6ab {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1112.544245] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/61bbb676-eb4a-448b-9d8a-abdf1c9af6ab/61bbb676-eb4a-448b-9d8a-abdf1c9af6ab.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1112.544409] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquired lock "[datastore2] devstack-image-cache_base/61bbb676-eb4a-448b-9d8a-abdf1c9af6ab/61bbb676-eb4a-448b-9d8a-abdf1c9af6ab.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.544620] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1112.544881] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f5636945-31ed-4cc6-b599-7f68c93d45d8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.561991] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1112.562207] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1112.563030] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac115d07-72b8-40f8-a3f2-65bbe84ae4f9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.580450] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1112.580450] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52534e0b-de90-6122-6769-622cae867044" [ 1112.580450] env[62522]: _type = "Task" [ 1112.580450] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.589378] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52534e0b-de90-6122-6769-622cae867044, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.699577] env[62522]: DEBUG nova.compute.utils [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1112.705049] env[62522]: DEBUG nova.compute.manager [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1112.705049] env[62522]: DEBUG nova.network.neutron [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1112.746120] env[62522]: DEBUG nova.policy [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4796aa7d3ac64de7a7cead0e88bf1542', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd2a0f07d07c841ddbab81783fcf577e9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1112.761206] env[62522]: DEBUG nova.network.neutron [req-af39639d-798e-4271-821a-1bf91eb8a45f req-10393f9d-8c34-4efc-9ff0-c789954c2c81 service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Updated VIF entry in instance network info cache for port 931dfe44-9ac3-4df4-a4ea-6c8612389451. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1112.761563] env[62522]: DEBUG nova.network.neutron [req-af39639d-798e-4271-821a-1bf91eb8a45f req-10393f9d-8c34-4efc-9ff0-c789954c2c81 service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Updating instance_info_cache with network_info: [{"id": "931dfe44-9ac3-4df4-a4ea-6c8612389451", "address": "fa:16:3e:f5:6d:32", "network": {"id": "949f3536-8a7e-4edf-b6cc-6a264fe5fe83", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1891232839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f93394feaa4f4b61a5d3d670d32ec599", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap931dfe44-9a", "ovs_interfaceid": "931dfe44-9ac3-4df4-a4ea-6c8612389451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.860478] env[62522]: DEBUG nova.compute.manager [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1112.891019] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-612a5908-b2e2-41ce-a634-87f33220b3d4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.900238] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5512038c-f2aa-4b8f-9ba1-aa2b3c265cba {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.933340] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ff3b46-fcc5-45f6-bf44-2f6167ac7069 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.942985] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b73a978f-2c6b-4548-a252-b70c18bc7e82 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.960494] env[62522]: DEBUG nova.compute.provider_tree [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1113.030466] env[62522]: DEBUG oslo_vmware.api [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416197, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173145} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.030466] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1113.030466] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1113.030466] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1113.030466] env[62522]: INFO nova.compute.manager [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Took 0.60 seconds to destroy the instance on the hypervisor. 
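(Editor's note: the DeleteDatastoreFile_Task sequence above — invoke a vCenter API that returns a Task, then poll it until it completes — is the standard oslo.vmware pattern this log keeps repeating. Below is a minimal standalone sketch of that pattern for illustration only; the vCenter hostname, credentials and datastore path are placeholders, and it uses the public oslo.vmware entry points (VMwareAPISession.invoke_api / wait_for_task) rather than Nova's internal wrappers.)

# Hedged sketch of the "invoke task API, then wait for the task" pattern seen in
# the entries above. Hostname, credentials and the datastore path are placeholders.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vcenter.example.test', 'administrator@vsphere.local', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Find a Datacenter managed-object reference; DeleteDatastoreFile_Task needs one.
retrieve_result = session.invoke_api(vim_util, 'get_objects',
                                     session.vim, 'Datacenter', 100)
dc_ref = retrieve_result.objects[0].obj  # assumes at least one datacenter exists

# Ask vCenter to delete a datastore path; the call returns a Task reference at once.
task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                          session.vim.service_content.fileManager,
                          name='[datastore1] 5426087f-3dd0-4796-aa46-6020a3bda4f5',
                          datacenter=dc_ref)

# Block until vCenter reports the task done; progress is logged much like the
# "_poll_task ... progress is 0%" lines above, and a failed task raises an
# oslo.vmware exception instead of returning.
session.wait_for_task(task)

session.logout()

(Nova's vmwareapi driver reaches the same two oslo.vmware entry points through thin wrappers, which is why the log attributes the delete to nova/virt/vmwareapi/ds_util.py and the polling to oslo_vmware/api.py. The raw log resumes below.)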
[ 1113.030466] env[62522]: DEBUG oslo.service.loopingcall [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1113.030466] env[62522]: DEBUG nova.compute.manager [-] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1113.030466] env[62522]: DEBUG nova.network.neutron [-] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1113.033079] env[62522]: DEBUG nova.network.neutron [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Successfully created port: 608eb061-5051-4459-a45a-6359abaf3221 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1113.090952] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Preparing fetch location {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1113.091287] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Fetch image to [datastore2] OSTACK_IMG_8004a320-a1ce-4518-a126-113924ecf55a/OSTACK_IMG_8004a320-a1ce-4518-a126-113924ecf55a.vmdk {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1113.091582] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Downloading stream optimized image 61bbb676-eb4a-448b-9d8a-abdf1c9af6ab to [datastore2] OSTACK_IMG_8004a320-a1ce-4518-a126-113924ecf55a/OSTACK_IMG_8004a320-a1ce-4518-a126-113924ecf55a.vmdk on the data store datastore2 as vApp {{(pid=62522) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1113.091782] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Downloading image file data 61bbb676-eb4a-448b-9d8a-abdf1c9af6ab to the ESX as VM named 'OSTACK_IMG_8004a320-a1ce-4518-a126-113924ecf55a' {{(pid=62522) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1113.177114] env[62522]: DEBUG oslo_vmware.rw_handles [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1113.177114] env[62522]: value = "resgroup-9" [ 1113.177114] env[62522]: _type = 
"ResourcePool" [ 1113.177114] env[62522]: }. {{(pid=62522) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1113.178038] env[62522]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-27af2b5b-de13-4b1c-8001-0cf17d0cd457 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.202353] env[62522]: DEBUG oslo_vmware.rw_handles [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lease: (returnval){ [ 1113.202353] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525f6a46-0106-6545-b154-c62450d4ab80" [ 1113.202353] env[62522]: _type = "HttpNfcLease" [ 1113.202353] env[62522]: } obtained for vApp import into resource pool (val){ [ 1113.202353] env[62522]: value = "resgroup-9" [ 1113.202353] env[62522]: _type = "ResourcePool" [ 1113.202353] env[62522]: }. {{(pid=62522) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1113.202991] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the lease: (returnval){ [ 1113.202991] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525f6a46-0106-6545-b154-c62450d4ab80" [ 1113.202991] env[62522]: _type = "HttpNfcLease" [ 1113.202991] env[62522]: } to be ready. {{(pid=62522) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1113.206907] env[62522]: DEBUG nova.compute.manager [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1113.214926] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1113.214926] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525f6a46-0106-6545-b154-c62450d4ab80" [ 1113.214926] env[62522]: _type = "HttpNfcLease" [ 1113.214926] env[62522]: } is initializing. 
{{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1113.264955] env[62522]: DEBUG oslo_concurrency.lockutils [req-af39639d-798e-4271-821a-1bf91eb8a45f req-10393f9d-8c34-4efc-9ff0-c789954c2c81 service nova] Releasing lock "refresh_cache-c28d2907-5b59-4df8-91a8-4ba0f2047d89" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1113.383106] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1113.462650] env[62522]: DEBUG nova.scheduler.client.report [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1113.714045] env[62522]: INFO nova.virt.block_device [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Booting with volume d0f4a6c8-a536-4b93-85d4-2b0510f42669 at /dev/sda [ 1113.715353] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1113.715353] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525f6a46-0106-6545-b154-c62450d4ab80" [ 1113.715353] env[62522]: _type = "HttpNfcLease" [ 1113.715353] env[62522]: } is ready. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1113.715901] env[62522]: DEBUG oslo_vmware.rw_handles [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1113.715901] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525f6a46-0106-6545-b154-c62450d4ab80" [ 1113.715901] env[62522]: _type = "HttpNfcLease" [ 1113.715901] env[62522]: }. {{(pid=62522) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1113.716696] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48643df3-352a-4358-8b94-49d30d8d1136 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.726765] env[62522]: DEBUG oslo_vmware.rw_handles [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b6afe5-2c3e-2c6b-3a6f-bc791f76890f/disk-0.vmdk from lease info. 
{{(pid=62522) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1113.726913] env[62522]: DEBUG oslo_vmware.rw_handles [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Creating HTTP connection to write to file with size = 31670784 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b6afe5-2c3e-2c6b-3a6f-bc791f76890f/disk-0.vmdk. {{(pid=62522) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1113.785429] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7e54a335-ceb1-451e-b3ad-b56caab6777e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.792053] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4982ead1-c415-4f73-b043-d9c50276d485 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.799270] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eec7983-fad2-46ed-9315-2e16868ba73a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.827825] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0fbb437d-8a29-417d-8458-16829fd0d62d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.836790] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b76dbbb-0825-408e-896d-2942fee29486 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.866659] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2252ab53-e0e1-4119-85d1-84e66b4a0f2f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.873848] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12268ee4-fe86-4553-89b6-c42bffb205a7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.889423] env[62522]: DEBUG nova.virt.block_device [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Updating existing volume attachment record: 67f5edfc-f367-4805-ad88-fd90becae186 {{(pid=62522) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1113.956471] env[62522]: DEBUG nova.network.neutron [-] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.969536] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.774s {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1113.972980] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.081s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1113.973096] env[62522]: DEBUG nova.objects.instance [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lazy-loading 'resources' on Instance uuid c181ce48-9fe2-4400-9047-f8b5a7159dd3 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1113.996020] env[62522]: INFO nova.scheduler.client.report [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Deleted allocations for instance e369d9e1-1345-4038-b5f3-f816fe767a72 [ 1113.999531] env[62522]: DEBUG nova.compute.manager [req-605b1e8f-d384-4e09-a66d-75cbed84e23e req-dd670eeb-57ba-44ca-bb6f-44f430a1787f service nova] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Received event network-vif-deleted-68620e13-f57f-4573-a1d2-4092e26de22d {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1114.461412] env[62522]: INFO nova.compute.manager [-] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Took 1.43 seconds to deallocate network for instance. [ 1114.513326] env[62522]: DEBUG nova.compute.manager [req-7129f25b-8604-4c53-92e2-d8b77e4748e3 req-613dc700-026a-4ad5-8e2d-27a5fe46a041 service nova] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Received event network-vif-plugged-608eb061-5051-4459-a45a-6359abaf3221 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1114.513440] env[62522]: DEBUG oslo_concurrency.lockutils [req-7129f25b-8604-4c53-92e2-d8b77e4748e3 req-613dc700-026a-4ad5-8e2d-27a5fe46a041 service nova] Acquiring lock "981a4839-28d0-4d91-88cd-99c1d263ca4d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.513666] env[62522]: DEBUG oslo_concurrency.lockutils [req-7129f25b-8604-4c53-92e2-d8b77e4748e3 req-613dc700-026a-4ad5-8e2d-27a5fe46a041 service nova] Lock "981a4839-28d0-4d91-88cd-99c1d263ca4d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.513828] env[62522]: DEBUG oslo_concurrency.lockutils [req-7129f25b-8604-4c53-92e2-d8b77e4748e3 req-613dc700-026a-4ad5-8e2d-27a5fe46a041 service nova] Lock "981a4839-28d0-4d91-88cd-99c1d263ca4d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1114.514029] env[62522]: DEBUG nova.compute.manager [req-7129f25b-8604-4c53-92e2-d8b77e4748e3 req-613dc700-026a-4ad5-8e2d-27a5fe46a041 service nova] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] No waiting events found dispatching network-vif-plugged-608eb061-5051-4459-a45a-6359abaf3221 
{{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1114.514192] env[62522]: WARNING nova.compute.manager [req-7129f25b-8604-4c53-92e2-d8b77e4748e3 req-613dc700-026a-4ad5-8e2d-27a5fe46a041 service nova] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Received unexpected event network-vif-plugged-608eb061-5051-4459-a45a-6359abaf3221 for instance with vm_state building and task_state block_device_mapping. [ 1114.517502] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7cbb239d-0f5a-4988-9a0a-edaeef893749 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "e369d9e1-1345-4038-b5f3-f816fe767a72" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.327s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1114.599888] env[62522]: DEBUG nova.network.neutron [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Successfully updated port: 608eb061-5051-4459-a45a-6359abaf3221 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1114.701401] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-939938ae-e6d9-41ed-b14e-151ea9af6e09 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.709653] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6cab8af-80c0-4727-a36d-ae2b6ea3584d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.744931] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-344bb7ec-04a0-4a06-9e1d-54f39c675c15 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.755169] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4e36de-0bd2-444d-a0e6-7d3596e56a78 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.769612] env[62522]: DEBUG nova.compute.provider_tree [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1114.892668] env[62522]: DEBUG oslo_vmware.rw_handles [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Completed reading data from the image iterator. {{(pid=62522) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1114.892668] env[62522]: DEBUG oslo_vmware.rw_handles [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b6afe5-2c3e-2c6b-3a6f-bc791f76890f/disk-0.vmdk. 
{{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1114.893683] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ff4302e-082b-494b-8a3a-0d4663fb87e7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.903242] env[62522]: DEBUG oslo_vmware.rw_handles [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b6afe5-2c3e-2c6b-3a6f-bc791f76890f/disk-0.vmdk is in state: ready. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1114.903420] env[62522]: DEBUG oslo_vmware.rw_handles [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b6afe5-2c3e-2c6b-3a6f-bc791f76890f/disk-0.vmdk. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1114.903938] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-4f9c57f0-ac86-4aa9-8976-f2e102b171ac {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.979674] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1115.106748] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Acquiring lock "refresh_cache-981a4839-28d0-4d91-88cd-99c1d263ca4d" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1115.106899] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Acquired lock "refresh_cache-981a4839-28d0-4d91-88cd-99c1d263ca4d" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.107087] env[62522]: DEBUG nova.network.neutron [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1115.110157] env[62522]: DEBUG oslo_vmware.rw_handles [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b6afe5-2c3e-2c6b-3a6f-bc791f76890f/disk-0.vmdk. 
{{(pid=62522) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1115.110422] env[62522]: INFO nova.virt.vmwareapi.images [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Downloaded image file data 61bbb676-eb4a-448b-9d8a-abdf1c9af6ab [ 1115.111443] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-722552ce-89c8-4d1d-9c78-7e283cefab09 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.127487] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-179e7db5-bab3-444f-8c14-6825275a6dfc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.159596] env[62522]: INFO nova.virt.vmwareapi.images [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] The imported VM was unregistered [ 1115.161909] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Caching image {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1115.162228] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Creating directory with path [datastore2] devstack-image-cache_base/61bbb676-eb4a-448b-9d8a-abdf1c9af6ab {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1115.162556] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e39ca59f-19c5-4ea4-b98f-28e562ec9260 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.172973] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Created directory with path [datastore2] devstack-image-cache_base/61bbb676-eb4a-448b-9d8a-abdf1c9af6ab {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1115.173131] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_8004a320-a1ce-4518-a126-113924ecf55a/OSTACK_IMG_8004a320-a1ce-4518-a126-113924ecf55a.vmdk to [datastore2] devstack-image-cache_base/61bbb676-eb4a-448b-9d8a-abdf1c9af6ab/61bbb676-eb4a-448b-9d8a-abdf1c9af6ab.vmdk. 
{{(pid=62522) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1115.173394] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-a15f8062-7197-42e0-8d32-fc4a7fbd462e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.180038] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1115.180038] env[62522]: value = "task-2416200" [ 1115.180038] env[62522]: _type = "Task" [ 1115.180038] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.187291] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416200, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.275513] env[62522]: DEBUG nova.scheduler.client.report [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1115.596011] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "80dd48b7-09fb-4127-af11-b2d52a49ca12" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1115.596011] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "80dd48b7-09fb-4127-af11-b2d52a49ca12" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1115.668279] env[62522]: DEBUG nova.network.neutron [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1115.695367] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416200, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.781067] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.808s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.787242] env[62522]: DEBUG oslo_concurrency.lockutils [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.533s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1115.790271] env[62522]: INFO nova.compute.claims [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1115.815636] env[62522]: INFO nova.scheduler.client.report [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Deleted allocations for instance c181ce48-9fe2-4400-9047-f8b5a7159dd3 [ 1115.842365] env[62522]: DEBUG nova.network.neutron [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Updating instance_info_cache with network_info: [{"id": "608eb061-5051-4459-a45a-6359abaf3221", "address": "fa:16:3e:5c:25:a1", "network": {"id": "f04169bc-47cf-4a4e-9c24-43bce1d74613", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1139239981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2a0f07d07c841ddbab81783fcf577e9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c20f5114-0866-45b3-9a7c-62f113ff83fa", "external-id": "nsx-vlan-transportzone-47", "segmentation_id": 47, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap608eb061-50", "ovs_interfaceid": "608eb061-5051-4459-a45a-6359abaf3221", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1115.987031] 
env[62522]: DEBUG nova.compute.manager [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1115.987527] env[62522]: DEBUG nova.virt.hardware [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1115.987755] env[62522]: DEBUG nova.virt.hardware [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1115.987911] env[62522]: DEBUG nova.virt.hardware [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1115.988107] env[62522]: DEBUG nova.virt.hardware [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1115.988270] env[62522]: DEBUG nova.virt.hardware [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1115.988631] env[62522]: DEBUG nova.virt.hardware [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1115.988709] env[62522]: DEBUG nova.virt.hardware [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1115.988849] env[62522]: DEBUG nova.virt.hardware [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1115.989030] env[62522]: DEBUG nova.virt.hardware [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1115.989203] env[62522]: DEBUG nova.virt.hardware [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1115.989380] env[62522]: DEBUG nova.virt.hardware [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1115.990382] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95907308-c1fb-48ea-ae66-2642aeff9d90 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.001732] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb32362c-280f-4eaa-ad54-21cdf37e7355 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.097744] env[62522]: DEBUG nova.compute.manager [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1116.192677] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416200, 'name': MoveVirtualDisk_Task} progress is 43%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.326890] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5d529b22-7320-405a-9782-2edfce6ee86e tempest-ServerRescueNegativeTestJSON-454526832 tempest-ServerRescueNegativeTestJSON-454526832-project-member] Lock "c181ce48-9fe2-4400-9047-f8b5a7159dd3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.900s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.330051] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1116.330051] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1116.343689] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Releasing lock "refresh_cache-981a4839-28d0-4d91-88cd-99c1d263ca4d" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1116.343981] env[62522]: DEBUG nova.compute.manager [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Instance network_info: |[{"id": "608eb061-5051-4459-a45a-6359abaf3221", "address": "fa:16:3e:5c:25:a1", "network": {"id": "f04169bc-47cf-4a4e-9c24-43bce1d74613", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1139239981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2a0f07d07c841ddbab81783fcf577e9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c20f5114-0866-45b3-9a7c-62f113ff83fa", "external-id": "nsx-vlan-transportzone-47", "segmentation_id": 47, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap608eb061-50", "ovs_interfaceid": "608eb061-5051-4459-a45a-6359abaf3221", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1116.344541] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:25:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'c20f5114-0866-45b3-9a7c-62f113ff83fa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '608eb061-5051-4459-a45a-6359abaf3221', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1116.353715] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Creating folder: Project (d2a0f07d07c841ddbab81783fcf577e9). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1116.354943] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cf8be8c8-78ac-48de-87ef-fd1e0a3cd371 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.371190] env[62522]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1116.371364] env[62522]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62522) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1116.371755] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Folder already exists: Project (d2a0f07d07c841ddbab81783fcf577e9). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1116.371962] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Creating folder: Instances. Parent ref: group-v489810. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1116.372905] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-73b93128-683b-49e7-b1cf-ee88886b7c1e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.385205] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Created folder: Instances in parent group-v489810. [ 1116.385639] env[62522]: DEBUG oslo.service.loopingcall [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1116.385866] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1116.386102] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f80870e0-d711-4b4f-9b10-3de0dd508a3f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.406802] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1116.406802] env[62522]: value = "task-2416203" [ 1116.406802] env[62522]: _type = "Task" [ 1116.406802] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.415818] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416203, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.542007] env[62522]: DEBUG nova.compute.manager [req-d61c4f78-3d98-4859-b0d4-3279c51b0687 req-c19e3ecd-4e62-46be-b4f7-450fec01b983 service nova] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Received event network-changed-608eb061-5051-4459-a45a-6359abaf3221 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1116.542267] env[62522]: DEBUG nova.compute.manager [req-d61c4f78-3d98-4859-b0d4-3279c51b0687 req-c19e3ecd-4e62-46be-b4f7-450fec01b983 service nova] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Refreshing instance network info cache due to event network-changed-608eb061-5051-4459-a45a-6359abaf3221. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1116.542544] env[62522]: DEBUG oslo_concurrency.lockutils [req-d61c4f78-3d98-4859-b0d4-3279c51b0687 req-c19e3ecd-4e62-46be-b4f7-450fec01b983 service nova] Acquiring lock "refresh_cache-981a4839-28d0-4d91-88cd-99c1d263ca4d" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1116.542711] env[62522]: DEBUG oslo_concurrency.lockutils [req-d61c4f78-3d98-4859-b0d4-3279c51b0687 req-c19e3ecd-4e62-46be-b4f7-450fec01b983 service nova] Acquired lock "refresh_cache-981a4839-28d0-4d91-88cd-99c1d263ca4d" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1116.542898] env[62522]: DEBUG nova.network.neutron [req-d61c4f78-3d98-4859-b0d4-3279c51b0687 req-c19e3ecd-4e62-46be-b4f7-450fec01b983 service nova] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Refreshing network info cache for port 608eb061-5051-4459-a45a-6359abaf3221 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1116.618738] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1116.693056] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416200, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.838831] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1116.838831] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Starting heal instance info cache {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1116.919701] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416203, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.980255] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78eee3eb-34d8-4880-aa49-28110c559802 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.991029] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d77823-9cbc-4a2e-8e48-cfcdf289afcb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.031031] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dece1b36-1d7d-42fb-8567-6faac1391adc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.039860] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-166eda1a-24c3-4d8a-9da0-0be8a2d87987 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.063493] env[62522]: DEBUG nova.compute.provider_tree [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1117.194249] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416200, 'name': MoveVirtualDisk_Task} progress is 85%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.308165] env[62522]: DEBUG nova.network.neutron [req-d61c4f78-3d98-4859-b0d4-3279c51b0687 req-c19e3ecd-4e62-46be-b4f7-450fec01b983 service nova] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Updated VIF entry in instance network info cache for port 608eb061-5051-4459-a45a-6359abaf3221. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1117.308553] env[62522]: DEBUG nova.network.neutron [req-d61c4f78-3d98-4859-b0d4-3279c51b0687 req-c19e3ecd-4e62-46be-b4f7-450fec01b983 service nova] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Updating instance_info_cache with network_info: [{"id": "608eb061-5051-4459-a45a-6359abaf3221", "address": "fa:16:3e:5c:25:a1", "network": {"id": "f04169bc-47cf-4a4e-9c24-43bce1d74613", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1139239981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2a0f07d07c841ddbab81783fcf577e9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c20f5114-0866-45b3-9a7c-62f113ff83fa", "external-id": "nsx-vlan-transportzone-47", "segmentation_id": 47, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap608eb061-50", "ovs_interfaceid": "608eb061-5051-4459-a45a-6359abaf3221", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1117.420532] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416203, 'name': CreateVM_Task, 'duration_secs': 0.730658} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.420747] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1117.421584] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sda', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489817', 'volume_id': 'd0f4a6c8-a536-4b93-85d4-2b0510f42669', 'name': 'volume-d0f4a6c8-a536-4b93-85d4-2b0510f42669', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '981a4839-28d0-4d91-88cd-99c1d263ca4d', 'attached_at': '', 'detached_at': '', 'volume_id': 'd0f4a6c8-a536-4b93-85d4-2b0510f42669', 'serial': 'd0f4a6c8-a536-4b93-85d4-2b0510f42669'}, 'attachment_id': '67f5edfc-f367-4805-ad88-fd90becae186', 'delete_on_termination': True, 'guest_format': None, 'device_type': None, 'boot_index': 0, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=62522) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1117.421910] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 
981a4839-28d0-4d91-88cd-99c1d263ca4d] Root volume attach. Driver type: vmdk {{(pid=62522) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1117.422814] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1978540-1573-4267-abc8-2783fec09b19 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.433212] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-888d8a20-46ba-4534-b9fe-723bb7c4d11c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.441773] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b202fc52-1809-4e3e-bf2d-6a71e2ea9eb5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.449728] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-0ac9f25f-424e-4414-875c-0838a791fa78 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.458724] env[62522]: DEBUG oslo_vmware.api [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Waiting for the task: (returnval){ [ 1117.458724] env[62522]: value = "task-2416204" [ 1117.458724] env[62522]: _type = "Task" [ 1117.458724] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.468201] env[62522]: DEBUG oslo_vmware.api [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416204, 'name': RelocateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.573204] env[62522]: DEBUG nova.scheduler.client.report [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1117.698838] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416200, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.457984} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.699150] env[62522]: INFO nova.virt.vmwareapi.ds_util [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_8004a320-a1ce-4518-a126-113924ecf55a/OSTACK_IMG_8004a320-a1ce-4518-a126-113924ecf55a.vmdk to [datastore2] devstack-image-cache_base/61bbb676-eb4a-448b-9d8a-abdf1c9af6ab/61bbb676-eb4a-448b-9d8a-abdf1c9af6ab.vmdk. [ 1117.699347] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Cleaning up location [datastore2] OSTACK_IMG_8004a320-a1ce-4518-a126-113924ecf55a {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1117.699511] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_8004a320-a1ce-4518-a126-113924ecf55a {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1117.699792] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2798d72f-6bae-405d-9682-9b8b3e23e238 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.710472] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1117.710472] env[62522]: value = "task-2416205" [ 1117.710472] env[62522]: _type = "Task" [ 1117.710472] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.722861] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416205, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.811751] env[62522]: DEBUG oslo_concurrency.lockutils [req-d61c4f78-3d98-4859-b0d4-3279c51b0687 req-c19e3ecd-4e62-46be-b4f7-450fec01b983 service nova] Releasing lock "refresh_cache-981a4839-28d0-4d91-88cd-99c1d263ca4d" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1117.975801] env[62522]: DEBUG oslo_vmware.api [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416204, 'name': RelocateVM_Task} progress is 42%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.078873] env[62522]: DEBUG oslo_concurrency.lockutils [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.292s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1118.079580] env[62522]: DEBUG nova.compute.manager [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1118.086024] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.875s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1118.086024] env[62522]: INFO nova.compute.claims [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1118.229017] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416205, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067159} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.229017] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1118.229017] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Releasing lock "[datastore2] devstack-image-cache_base/61bbb676-eb4a-448b-9d8a-abdf1c9af6ab/61bbb676-eb4a-448b-9d8a-abdf1c9af6ab.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1118.229017] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/61bbb676-eb4a-448b-9d8a-abdf1c9af6ab/61bbb676-eb4a-448b-9d8a-abdf1c9af6ab.vmdk to [datastore2] c28d2907-5b59-4df8-91a8-4ba0f2047d89/c28d2907-5b59-4df8-91a8-4ba0f2047d89.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1118.229017] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-55b4c4c4-0247-42bd-810f-555c8979b32c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.237798] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1118.237798] env[62522]: value = "task-2416206" [ 1118.237798] env[62522]: _type = "Task" [ 1118.237798] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.249757] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416206, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.475462] env[62522]: DEBUG oslo_vmware.api [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416204, 'name': RelocateVM_Task} progress is 56%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.589446] env[62522]: DEBUG nova.compute.utils [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1118.594651] env[62522]: DEBUG nova.compute.manager [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1118.594651] env[62522]: DEBUG nova.network.neutron [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1118.672167] env[62522]: DEBUG nova.policy [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab5e5a8e6ee64aad8d52342ee3f5af36', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4bdd1f5caf09454d808bcdc15df2d3a7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1118.754481] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416206, 'name': CopyVirtualDisk_Task} progress is 15%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.850757] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Didn't find any instances for network info cache update. {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 1118.851102] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1118.851382] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1118.851552] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1118.851712] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1118.851859] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1118.852016] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62522) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1118.852167] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62522) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1118.852316] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1118.972620] env[62522]: DEBUG oslo_vmware.api [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416204, 'name': RelocateVM_Task} progress is 71%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.095259] env[62522]: DEBUG nova.compute.manager [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1119.226789] env[62522]: DEBUG nova.network.neutron [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Successfully created port: 43d86dfd-5c95-438b-808b-91ab1078323b {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1119.251056] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416206, 'name': CopyVirtualDisk_Task} progress is 38%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.323710] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a37d81e-e5eb-4462-9744-baeae27abb6f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.334955] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9333ff0-6d4d-4f13-974d-29b91a498c6e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.378775] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.379948] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4509eb23-e278-405d-8165-6e1a49f9f78a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.390945] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5682b31b-fc84-41ca-a639-7fb791cae683 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.409346] env[62522]: DEBUG nova.compute.provider_tree [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1119.474745] env[62522]: DEBUG oslo_vmware.api [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416204, 'name': RelocateVM_Task} progress is 84%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.752977] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416206, 'name': CopyVirtualDisk_Task} progress is 63%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.912714] env[62522]: DEBUG nova.scheduler.client.report [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1119.974196] env[62522]: DEBUG oslo_vmware.api [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416204, 'name': RelocateVM_Task} progress is 97%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.108852] env[62522]: DEBUG nova.compute.manager [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1120.139250] env[62522]: DEBUG nova.virt.hardware [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1120.139437] env[62522]: DEBUG nova.virt.hardware [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1120.139557] env[62522]: DEBUG nova.virt.hardware [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1120.139738] env[62522]: DEBUG nova.virt.hardware [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Flavor pref 0:0:0 {{(pid=62522) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1120.139881] env[62522]: DEBUG nova.virt.hardware [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1120.140039] env[62522]: DEBUG nova.virt.hardware [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1120.140257] env[62522]: DEBUG nova.virt.hardware [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1120.140430] env[62522]: DEBUG nova.virt.hardware [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1120.140600] env[62522]: DEBUG nova.virt.hardware [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1120.140764] env[62522]: DEBUG nova.virt.hardware [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1120.140938] env[62522]: DEBUG nova.virt.hardware [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1120.142061] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-211d0ff5-3c08-4924-91cb-3e73c31bdcac {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.150937] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d42272ca-cdc4-41b1-b3ea-520ba680369f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.251849] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416206, 'name': CopyVirtualDisk_Task} progress is 83%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.419326] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.336s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.419915] env[62522]: DEBUG nova.compute.manager [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1120.422802] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.144s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.423042] env[62522]: DEBUG nova.objects.instance [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Lazy-loading 'resources' on Instance uuid 92604d35-7e59-45b0-9dce-32e515703936 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1120.473241] env[62522]: DEBUG oslo_vmware.api [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416204, 'name': RelocateVM_Task} progress is 97%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.752675] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416206, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.377058} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.752965] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/61bbb676-eb4a-448b-9d8a-abdf1c9af6ab/61bbb676-eb4a-448b-9d8a-abdf1c9af6ab.vmdk to [datastore2] c28d2907-5b59-4df8-91a8-4ba0f2047d89/c28d2907-5b59-4df8-91a8-4ba0f2047d89.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1120.753765] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70673922-cf81-4b4e-b16d-f3944ddb2d69 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.778304] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Reconfiguring VM instance instance-0000003c to attach disk [datastore2] c28d2907-5b59-4df8-91a8-4ba0f2047d89/c28d2907-5b59-4df8-91a8-4ba0f2047d89.vmdk or device None with type streamOptimized {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1120.779547] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0804da70-2d5a-46bf-8666-a9701f41ad95 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.796775] env[62522]: DEBUG nova.compute.manager [req-65eab142-dbd5-4fbe-b89e-f372cb25b63f req-6a4a9109-ffeb-4cf0-b7dd-87be066963d9 service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Received event network-vif-plugged-43d86dfd-5c95-438b-808b-91ab1078323b {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1120.797253] env[62522]: DEBUG oslo_concurrency.lockutils [req-65eab142-dbd5-4fbe-b89e-f372cb25b63f req-6a4a9109-ffeb-4cf0-b7dd-87be066963d9 service nova] Acquiring lock "93a2505e-814d-4809-90a9-0bc215406efd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.797512] env[62522]: DEBUG oslo_concurrency.lockutils [req-65eab142-dbd5-4fbe-b89e-f372cb25b63f req-6a4a9109-ffeb-4cf0-b7dd-87be066963d9 service nova] Lock "93a2505e-814d-4809-90a9-0bc215406efd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.797744] env[62522]: DEBUG oslo_concurrency.lockutils [req-65eab142-dbd5-4fbe-b89e-f372cb25b63f req-6a4a9109-ffeb-4cf0-b7dd-87be066963d9 service nova] Lock "93a2505e-814d-4809-90a9-0bc215406efd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.798111] env[62522]: DEBUG nova.compute.manager [req-65eab142-dbd5-4fbe-b89e-f372cb25b63f req-6a4a9109-ffeb-4cf0-b7dd-87be066963d9 service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] No waiting events found dispatching 
network-vif-plugged-43d86dfd-5c95-438b-808b-91ab1078323b {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1120.798331] env[62522]: WARNING nova.compute.manager [req-65eab142-dbd5-4fbe-b89e-f372cb25b63f req-6a4a9109-ffeb-4cf0-b7dd-87be066963d9 service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Received unexpected event network-vif-plugged-43d86dfd-5c95-438b-808b-91ab1078323b for instance with vm_state building and task_state spawning. [ 1120.805882] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1120.805882] env[62522]: value = "task-2416207" [ 1120.805882] env[62522]: _type = "Task" [ 1120.805882] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.815571] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416207, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.864266] env[62522]: DEBUG nova.network.neutron [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Successfully updated port: 43d86dfd-5c95-438b-808b-91ab1078323b {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1120.926052] env[62522]: DEBUG nova.compute.utils [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1120.931529] env[62522]: DEBUG nova.compute.manager [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1120.931729] env[62522]: DEBUG nova.network.neutron [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1120.976843] env[62522]: DEBUG oslo_vmware.api [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416204, 'name': RelocateVM_Task} progress is 98%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.978291] env[62522]: DEBUG nova.policy [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3a4ba3a3d3a34495b7a7e0618577d60f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '071dd4c295a54e388099d5bf0f4e300b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1121.185398] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a70fd43-9b2d-47b8-8707-3f0cab5ebd60 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.195374] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a80704-494c-460a-a479-6216a58c28ec {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.229289] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6227598-5bb9-46b3-b682-45326e69cba6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.237567] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f8c8cb-441f-42fa-a481-5dc031c6be0d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.251592] env[62522]: DEBUG nova.compute.provider_tree [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1121.275960] env[62522]: DEBUG nova.network.neutron [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Successfully created port: 58444651-b47b-44d5-b240-53949c79df86 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1121.315555] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416207, 'name': ReconfigVM_Task, 'duration_secs': 0.364291} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.315824] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Reconfigured VM instance instance-0000003c to attach disk [datastore2] c28d2907-5b59-4df8-91a8-4ba0f2047d89/c28d2907-5b59-4df8-91a8-4ba0f2047d89.vmdk or device None with type streamOptimized {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1121.317199] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encrypted': False, 'size': 0, 'device_name': '/dev/sda', 'guest_format': None, 'device_type': 'disk', 'encryption_format': None, 'encryption_secret_uuid': None, 'boot_index': 0, 'disk_bus': None, 'encryption_options': None, 'image_id': '2ee4561b-ba48-4f45-82f6-eac89be98290'}], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sdb', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489819', 'volume_id': '1a6a964c-b9d4-4849-bb10-c20d35c6b3ec', 'name': 'volume-1a6a964c-b9d4-4849-bb10-c20d35c6b3ec', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'c28d2907-5b59-4df8-91a8-4ba0f2047d89', 'attached_at': '', 'detached_at': '', 'volume_id': '1a6a964c-b9d4-4849-bb10-c20d35c6b3ec', 'serial': '1a6a964c-b9d4-4849-bb10-c20d35c6b3ec'}, 'attachment_id': '5ae0885a-56b8-48d7-bd72-71043df2bc65', 'delete_on_termination': False, 'guest_format': None, 'device_type': None, 'boot_index': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=62522) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1121.317393] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Volume attach. 
Driver type: vmdk {{(pid=62522) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1121.317589] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489819', 'volume_id': '1a6a964c-b9d4-4849-bb10-c20d35c6b3ec', 'name': 'volume-1a6a964c-b9d4-4849-bb10-c20d35c6b3ec', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'c28d2907-5b59-4df8-91a8-4ba0f2047d89', 'attached_at': '', 'detached_at': '', 'volume_id': '1a6a964c-b9d4-4849-bb10-c20d35c6b3ec', 'serial': '1a6a964c-b9d4-4849-bb10-c20d35c6b3ec'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1121.318347] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b119c1c-3204-4ab8-b6f1-ff48c45ad057 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.334527] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef29fa1-6835-4053-9747-8e55e2e7c053 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.358842] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] volume-1a6a964c-b9d4-4849-bb10-c20d35c6b3ec/volume-1a6a964c-b9d4-4849-bb10-c20d35c6b3ec.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1121.359229] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e0b4914-1e6a-44ef-afba-36fac06cf1d8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.372124] env[62522]: DEBUG oslo_concurrency.lockutils [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "refresh_cache-93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1121.372260] env[62522]: DEBUG oslo_concurrency.lockutils [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "refresh_cache-93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.372448] env[62522]: DEBUG nova.network.neutron [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1121.379259] env[62522]: DEBUG oslo_vmware.api [None 
req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1121.379259] env[62522]: value = "task-2416208" [ 1121.379259] env[62522]: _type = "Task" [ 1121.379259] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.388542] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416208, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.434880] env[62522]: DEBUG nova.compute.manager [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1121.474912] env[62522]: DEBUG oslo_vmware.api [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416204, 'name': RelocateVM_Task, 'duration_secs': 3.664867} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.475132] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Volume attach. 
Driver type: vmdk {{(pid=62522) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1121.475344] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489817', 'volume_id': 'd0f4a6c8-a536-4b93-85d4-2b0510f42669', 'name': 'volume-d0f4a6c8-a536-4b93-85d4-2b0510f42669', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '981a4839-28d0-4d91-88cd-99c1d263ca4d', 'attached_at': '', 'detached_at': '', 'volume_id': 'd0f4a6c8-a536-4b93-85d4-2b0510f42669', 'serial': 'd0f4a6c8-a536-4b93-85d4-2b0510f42669'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1121.476271] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4b6f37-5a4b-4edb-9842-dcada41bb5e4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.492859] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a58bb44-ec45-47b4-aaac-4d815ec0cc18 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.514348] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] volume-d0f4a6c8-a536-4b93-85d4-2b0510f42669/volume-d0f4a6c8-a536-4b93-85d4-2b0510f42669.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1121.514905] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bfbf1ee8-6906-40c2-a10c-d08a0f19a046 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.534560] env[62522]: DEBUG oslo_vmware.api [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Waiting for the task: (returnval){ [ 1121.534560] env[62522]: value = "task-2416209" [ 1121.534560] env[62522]: _type = "Task" [ 1121.534560] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.543122] env[62522]: DEBUG oslo_vmware.api [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416209, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.755163] env[62522]: DEBUG nova.scheduler.client.report [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1121.889846] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416208, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.935120] env[62522]: DEBUG nova.network.neutron [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1122.046306] env[62522]: DEBUG oslo_vmware.api [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416209, 'name': ReconfigVM_Task, 'duration_secs': 0.307871} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.046584] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Reconfigured VM instance instance-00000061 to attach disk [datastore2] volume-d0f4a6c8-a536-4b93-85d4-2b0510f42669/volume-d0f4a6c8-a536-4b93-85d4-2b0510f42669.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1122.051317] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b27798c-9699-4427-8ce6-bd468ccf00f2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.069527] env[62522]: DEBUG oslo_vmware.api [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Waiting for the task: (returnval){ [ 1122.069527] env[62522]: value = "task-2416210" [ 1122.069527] env[62522]: _type = "Task" [ 1122.069527] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.080599] env[62522]: DEBUG oslo_vmware.api [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416210, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.166912] env[62522]: DEBUG nova.network.neutron [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Updating instance_info_cache with network_info: [{"id": "43d86dfd-5c95-438b-808b-91ab1078323b", "address": "fa:16:3e:ee:37:0f", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43d86dfd-5c", "ovs_interfaceid": "43d86dfd-5c95-438b-808b-91ab1078323b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.260669] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.838s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1122.263696] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 9.813s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1122.280764] env[62522]: INFO nova.scheduler.client.report [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Deleted allocations for instance 92604d35-7e59-45b0-9dce-32e515703936 [ 1122.391074] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416208, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.443792] env[62522]: DEBUG nova.compute.manager [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1122.471374] env[62522]: DEBUG nova.virt.hardware [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1122.471374] env[62522]: DEBUG nova.virt.hardware [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1122.471554] env[62522]: DEBUG nova.virt.hardware [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1122.471597] env[62522]: DEBUG nova.virt.hardware [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1122.471715] env[62522]: DEBUG nova.virt.hardware [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1122.471866] env[62522]: DEBUG nova.virt.hardware [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1122.472152] env[62522]: DEBUG nova.virt.hardware [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1122.472339] env[62522]: DEBUG nova.virt.hardware [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1122.472550] env[62522]: DEBUG nova.virt.hardware [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1122.472726] env[62522]: DEBUG nova.virt.hardware [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1122.472899] env[62522]: DEBUG nova.virt.hardware [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1122.473842] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd586317-71b2-4f80-ab4c-f237c19e5ecf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.482130] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14dcf8ad-b7ee-4941-8066-7b40412cd1e3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.580590] env[62522]: DEBUG oslo_vmware.api [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416210, 'name': ReconfigVM_Task, 'duration_secs': 0.135688} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.580923] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489817', 'volume_id': 'd0f4a6c8-a536-4b93-85d4-2b0510f42669', 'name': 'volume-d0f4a6c8-a536-4b93-85d4-2b0510f42669', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '981a4839-28d0-4d91-88cd-99c1d263ca4d', 'attached_at': '', 'detached_at': '', 'volume_id': 'd0f4a6c8-a536-4b93-85d4-2b0510f42669', 'serial': 'd0f4a6c8-a536-4b93-85d4-2b0510f42669'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1122.581487] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ed398bee-860a-4457-9e75-be7be7dbe15b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.589774] env[62522]: DEBUG oslo_vmware.api [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Waiting for the task: (returnval){ [ 1122.589774] env[62522]: value = "task-2416211" [ 1122.589774] env[62522]: _type = "Task" [ 1122.589774] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.597750] env[62522]: DEBUG oslo_vmware.api [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416211, 'name': Rename_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.670525] env[62522]: DEBUG oslo_concurrency.lockutils [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "refresh_cache-93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1122.671080] env[62522]: DEBUG nova.compute.manager [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Instance network_info: |[{"id": "43d86dfd-5c95-438b-808b-91ab1078323b", "address": "fa:16:3e:ee:37:0f", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43d86dfd-5c", "ovs_interfaceid": "43d86dfd-5c95-438b-808b-91ab1078323b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1122.671517] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:37:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee555dfd-3d1a-4220-89cd-ffba64e4acf0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '43d86dfd-5c95-438b-808b-91ab1078323b', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1122.682727] env[62522]: DEBUG oslo.service.loopingcall [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1122.683045] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1122.683318] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-37c17f16-0cac-4efa-ad87-14885bf82294 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.705490] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1122.705490] env[62522]: value = "task-2416212" [ 1122.705490] env[62522]: _type = "Task" [ 1122.705490] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.714925] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416212, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.742860] env[62522]: DEBUG nova.network.neutron [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Successfully updated port: 58444651-b47b-44d5-b240-53949c79df86 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1122.769038] env[62522]: INFO nova.compute.claims [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1122.788814] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1419a2e1-a407-4c81-a3fe-27528320ebd6 tempest-ServersAaction247Test-366820888 tempest-ServersAaction247Test-366820888-project-member] Lock "92604d35-7e59-45b0-9dce-32e515703936" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.806s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1122.814018] env[62522]: DEBUG nova.compute.manager [req-fab5d0b9-5ba4-4bdd-bf90-c23363e532fe req-1f1d5e48-427f-4266-9b31-bbe3f7fc0dc4 service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Received event network-changed-43d86dfd-5c95-438b-808b-91ab1078323b {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1122.814288] env[62522]: DEBUG nova.compute.manager [req-fab5d0b9-5ba4-4bdd-bf90-c23363e532fe req-1f1d5e48-427f-4266-9b31-bbe3f7fc0dc4 service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Refreshing instance network info cache due to event network-changed-43d86dfd-5c95-438b-808b-91ab1078323b. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1122.814575] env[62522]: DEBUG oslo_concurrency.lockutils [req-fab5d0b9-5ba4-4bdd-bf90-c23363e532fe req-1f1d5e48-427f-4266-9b31-bbe3f7fc0dc4 service nova] Acquiring lock "refresh_cache-93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1122.814788] env[62522]: DEBUG oslo_concurrency.lockutils [req-fab5d0b9-5ba4-4bdd-bf90-c23363e532fe req-1f1d5e48-427f-4266-9b31-bbe3f7fc0dc4 service nova] Acquired lock "refresh_cache-93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1122.814937] env[62522]: DEBUG nova.network.neutron [req-fab5d0b9-5ba4-4bdd-bf90-c23363e532fe req-1f1d5e48-427f-4266-9b31-bbe3f7fc0dc4 service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Refreshing network info cache for port 43d86dfd-5c95-438b-808b-91ab1078323b {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1122.892045] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416208, 'name': ReconfigVM_Task, 'duration_secs': 1.090072} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.892045] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Reconfigured VM instance instance-0000003c to attach disk [datastore1] volume-1a6a964c-b9d4-4849-bb10-c20d35c6b3ec/volume-1a6a964c-b9d4-4849-bb10-c20d35c6b3ec.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1122.896662] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5bfdcac-18b2-4538-80b2-6fe501005e92 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.914523] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1122.914523] env[62522]: value = "task-2416213" [ 1122.914523] env[62522]: _type = "Task" [ 1122.914523] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.924173] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416213, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.100298] env[62522]: DEBUG oslo_vmware.api [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416211, 'name': Rename_Task, 'duration_secs': 0.137031} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.100534] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1123.100775] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0a92d916-30f9-4fe5-8f24-e101b58cc081 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.106964] env[62522]: DEBUG oslo_vmware.api [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Waiting for the task: (returnval){ [ 1123.106964] env[62522]: value = "task-2416214" [ 1123.106964] env[62522]: _type = "Task" [ 1123.106964] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.114497] env[62522]: DEBUG oslo_vmware.api [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416214, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.215369] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416212, 'name': CreateVM_Task, 'duration_secs': 0.350969} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.215564] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1123.216258] env[62522]: DEBUG oslo_concurrency.lockutils [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1123.216465] env[62522]: DEBUG oslo_concurrency.lockutils [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.216791] env[62522]: DEBUG oslo_concurrency.lockutils [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1123.217070] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd4ae72b-087b-450a-bd96-7ff2996e1a05 {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.222178] env[62522]: DEBUG oslo_vmware.api [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1123.222178] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5203fbfb-ae48-2b98-70fc-923a4a8931a2" [ 1123.222178] env[62522]: _type = "Task" [ 1123.222178] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.230716] env[62522]: DEBUG oslo_vmware.api [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5203fbfb-ae48-2b98-70fc-923a4a8931a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.245489] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "refresh_cache-b31195c2-29f4-475c-baa7-fcb4791b7278" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1123.245996] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired lock "refresh_cache-b31195c2-29f4-475c-baa7-fcb4791b7278" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.245996] env[62522]: DEBUG nova.network.neutron [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1123.276266] env[62522]: INFO nova.compute.resource_tracker [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Updating resource usage from migration 2b15f686-afa5-4f5f-a5cd-77d737183ec3 [ 1123.424987] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416213, 'name': ReconfigVM_Task, 'duration_secs': 0.149119} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.428427] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489819', 'volume_id': '1a6a964c-b9d4-4849-bb10-c20d35c6b3ec', 'name': 'volume-1a6a964c-b9d4-4849-bb10-c20d35c6b3ec', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'c28d2907-5b59-4df8-91a8-4ba0f2047d89', 'attached_at': '', 'detached_at': '', 'volume_id': '1a6a964c-b9d4-4849-bb10-c20d35c6b3ec', 'serial': '1a6a964c-b9d4-4849-bb10-c20d35c6b3ec'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1123.429600] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084e6de2-ffc9-4033-ae5d-731432506160 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.432202] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-469a0463-f135-4a96-9a4d-582edd2678d4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.441367] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8edef1a-df85-4b9d-80d0-728f451e6f0a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.444544] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1123.444544] env[62522]: value = "task-2416215" [ 1123.444544] env[62522]: _type = "Task" [ 1123.444544] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.484119] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a2f5d06-15fd-4db7-95f7-4315ab1c6b35 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.486859] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416215, 'name': Rename_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.492722] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddbc8de7-eba7-4289-a9be-ecf0782efd0c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.508814] env[62522]: DEBUG nova.compute.provider_tree [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1123.577545] env[62522]: DEBUG nova.network.neutron [req-fab5d0b9-5ba4-4bdd-bf90-c23363e532fe req-1f1d5e48-427f-4266-9b31-bbe3f7fc0dc4 service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Updated VIF entry in instance network info cache for port 43d86dfd-5c95-438b-808b-91ab1078323b. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1123.577899] env[62522]: DEBUG nova.network.neutron [req-fab5d0b9-5ba4-4bdd-bf90-c23363e532fe req-1f1d5e48-427f-4266-9b31-bbe3f7fc0dc4 service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Updating instance_info_cache with network_info: [{"id": "43d86dfd-5c95-438b-808b-91ab1078323b", "address": "fa:16:3e:ee:37:0f", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43d86dfd-5c", "ovs_interfaceid": "43d86dfd-5c95-438b-808b-91ab1078323b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.617455] env[62522]: DEBUG oslo_vmware.api [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416214, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.733144] env[62522]: DEBUG oslo_vmware.api [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5203fbfb-ae48-2b98-70fc-923a4a8931a2, 'name': SearchDatastore_Task, 'duration_secs': 0.010753} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.733488] env[62522]: DEBUG oslo_concurrency.lockutils [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1123.733771] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1123.734030] env[62522]: DEBUG oslo_concurrency.lockutils [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1123.734217] env[62522]: DEBUG oslo_concurrency.lockutils [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.734377] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1123.734647] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a0257c91-7684-4f37-ad2c-21e194afa39d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.750678] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1123.754364] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1123.755209] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65ed73ec-0ca3-4b5c-80f6-ca8ed0d0d699 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.763103] env[62522]: DEBUG oslo_vmware.api [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1123.763103] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5245b4d3-7f42-f9e9-7917-8034dd3fd1bf" [ 1123.763103] env[62522]: _type = "Task" [ 1123.763103] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.771277] env[62522]: DEBUG oslo_vmware.api [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5245b4d3-7f42-f9e9-7917-8034dd3fd1bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.786532] env[62522]: DEBUG nova.network.neutron [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1123.909682] env[62522]: DEBUG nova.network.neutron [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Updating instance_info_cache with network_info: [{"id": "58444651-b47b-44d5-b240-53949c79df86", "address": "fa:16:3e:6c:81:68", "network": {"id": "fd993a8b-571c-4873-a5d6-4d8c60e23d38", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2084093882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071dd4c295a54e388099d5bf0f4e300b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58444651-b4", "ovs_interfaceid": "58444651-b47b-44d5-b240-53949c79df86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.955838] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416215, 'name': Rename_Task, 
'duration_secs': 0.165991} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.956718] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1123.956985] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-55b9d57d-2e9c-4b5e-aa63-dc865d5ea2e9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.964330] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1123.964330] env[62522]: value = "task-2416216" [ 1123.964330] env[62522]: _type = "Task" [ 1123.964330] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.972912] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416216, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.012844] env[62522]: DEBUG nova.scheduler.client.report [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1124.080596] env[62522]: DEBUG oslo_concurrency.lockutils [req-fab5d0b9-5ba4-4bdd-bf90-c23363e532fe req-1f1d5e48-427f-4266-9b31-bbe3f7fc0dc4 service nova] Releasing lock "refresh_cache-93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1124.080897] env[62522]: DEBUG nova.compute.manager [req-fab5d0b9-5ba4-4bdd-bf90-c23363e532fe req-1f1d5e48-427f-4266-9b31-bbe3f7fc0dc4 service nova] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Received event network-vif-plugged-58444651-b47b-44d5-b240-53949c79df86 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1124.081115] env[62522]: DEBUG oslo_concurrency.lockutils [req-fab5d0b9-5ba4-4bdd-bf90-c23363e532fe req-1f1d5e48-427f-4266-9b31-bbe3f7fc0dc4 service nova] Acquiring lock "b31195c2-29f4-475c-baa7-fcb4791b7278-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1124.081349] env[62522]: DEBUG oslo_concurrency.lockutils 
[req-fab5d0b9-5ba4-4bdd-bf90-c23363e532fe req-1f1d5e48-427f-4266-9b31-bbe3f7fc0dc4 service nova] Lock "b31195c2-29f4-475c-baa7-fcb4791b7278-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1124.081512] env[62522]: DEBUG oslo_concurrency.lockutils [req-fab5d0b9-5ba4-4bdd-bf90-c23363e532fe req-1f1d5e48-427f-4266-9b31-bbe3f7fc0dc4 service nova] Lock "b31195c2-29f4-475c-baa7-fcb4791b7278-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.081701] env[62522]: DEBUG nova.compute.manager [req-fab5d0b9-5ba4-4bdd-bf90-c23363e532fe req-1f1d5e48-427f-4266-9b31-bbe3f7fc0dc4 service nova] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] No waiting events found dispatching network-vif-plugged-58444651-b47b-44d5-b240-53949c79df86 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1124.081885] env[62522]: WARNING nova.compute.manager [req-fab5d0b9-5ba4-4bdd-bf90-c23363e532fe req-1f1d5e48-427f-4266-9b31-bbe3f7fc0dc4 service nova] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Received unexpected event network-vif-plugged-58444651-b47b-44d5-b240-53949c79df86 for instance with vm_state building and task_state spawning. [ 1124.082076] env[62522]: DEBUG nova.compute.manager [req-fab5d0b9-5ba4-4bdd-bf90-c23363e532fe req-1f1d5e48-427f-4266-9b31-bbe3f7fc0dc4 service nova] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Received event network-changed-58444651-b47b-44d5-b240-53949c79df86 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1124.082240] env[62522]: DEBUG nova.compute.manager [req-fab5d0b9-5ba4-4bdd-bf90-c23363e532fe req-1f1d5e48-427f-4266-9b31-bbe3f7fc0dc4 service nova] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Refreshing instance network info cache due to event network-changed-58444651-b47b-44d5-b240-53949c79df86. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1124.082441] env[62522]: DEBUG oslo_concurrency.lockutils [req-fab5d0b9-5ba4-4bdd-bf90-c23363e532fe req-1f1d5e48-427f-4266-9b31-bbe3f7fc0dc4 service nova] Acquiring lock "refresh_cache-b31195c2-29f4-475c-baa7-fcb4791b7278" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1124.119026] env[62522]: DEBUG oslo_vmware.api [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416214, 'name': PowerOnVM_Task, 'duration_secs': 0.523044} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.119026] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1124.119026] env[62522]: INFO nova.compute.manager [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Took 8.13 seconds to spawn the instance on the hypervisor. [ 1124.119257] env[62522]: DEBUG nova.compute.manager [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1124.120048] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79fb2744-2592-4437-8f0e-2f41457f2160 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.274378] env[62522]: DEBUG oslo_vmware.api [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5245b4d3-7f42-f9e9-7917-8034dd3fd1bf, 'name': SearchDatastore_Task, 'duration_secs': 0.008914} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.275188] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-666e2437-5424-4d31-becb-8a03cbac9084 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.281837] env[62522]: DEBUG oslo_vmware.api [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1124.281837] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522a82a7-856e-7d50-b0cc-a34b7a02b94a" [ 1124.281837] env[62522]: _type = "Task" [ 1124.281837] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.290821] env[62522]: DEBUG oslo_vmware.api [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522a82a7-856e-7d50-b0cc-a34b7a02b94a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.412126] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Releasing lock "refresh_cache-b31195c2-29f4-475c-baa7-fcb4791b7278" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1124.412557] env[62522]: DEBUG nova.compute.manager [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Instance network_info: |[{"id": "58444651-b47b-44d5-b240-53949c79df86", "address": "fa:16:3e:6c:81:68", "network": {"id": "fd993a8b-571c-4873-a5d6-4d8c60e23d38", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2084093882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071dd4c295a54e388099d5bf0f4e300b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58444651-b4", "ovs_interfaceid": "58444651-b47b-44d5-b240-53949c79df86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1124.412891] env[62522]: DEBUG oslo_concurrency.lockutils [req-fab5d0b9-5ba4-4bdd-bf90-c23363e532fe req-1f1d5e48-427f-4266-9b31-bbe3f7fc0dc4 service nova] Acquired lock "refresh_cache-b31195c2-29f4-475c-baa7-fcb4791b7278" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1124.413095] env[62522]: DEBUG nova.network.neutron [req-fab5d0b9-5ba4-4bdd-bf90-c23363e532fe req-1f1d5e48-427f-4266-9b31-bbe3f7fc0dc4 service nova] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Refreshing network info cache for port 58444651-b47b-44d5-b240-53949c79df86 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1124.414457] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6c:81:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd7b5f1ef-d4b9-4ec3-b047-17e4cb349d25', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '58444651-b47b-44d5-b240-53949c79df86', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1124.421948] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 
tempest-ServerActionsTestOtherA-43616242-project-member] Creating folder: Project (071dd4c295a54e388099d5bf0f4e300b). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1124.423067] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8111a9ee-b66d-4cb0-b59f-201fb8fd4d1c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.436732] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Created folder: Project (071dd4c295a54e388099d5bf0f4e300b) in parent group-v489562. [ 1124.436943] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Creating folder: Instances. Parent ref: group-v489835. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1124.437205] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8890a46d-1d86-40ff-b5b0-7835e7cf6d9c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.447465] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Created folder: Instances in parent group-v489835. [ 1124.447698] env[62522]: DEBUG oslo.service.loopingcall [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1124.447887] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1124.448102] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-17b89c15-e878-4c84-b40a-2e1a4454ec81 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.468536] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1124.468536] env[62522]: value = "task-2416219" [ 1124.468536] env[62522]: _type = "Task" [ 1124.468536] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.474622] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416216, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.478919] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416219, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.520619] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.257s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.520789] env[62522]: INFO nova.compute.manager [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Migrating [ 1124.528085] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.145s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1124.530024] env[62522]: INFO nova.compute.claims [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1124.642121] env[62522]: INFO nova.compute.manager [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Took 24.80 seconds to build instance. [ 1124.793482] env[62522]: DEBUG oslo_vmware.api [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522a82a7-856e-7d50-b0cc-a34b7a02b94a, 'name': SearchDatastore_Task, 'duration_secs': 0.009855} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.793715] env[62522]: DEBUG oslo_concurrency.lockutils [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1124.793979] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 93a2505e-814d-4809-90a9-0bc215406efd/93a2505e-814d-4809-90a9-0bc215406efd.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1124.794268] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fa733b34-c661-495b-ad71-a8ae57b5e064 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.802061] env[62522]: DEBUG oslo_vmware.api [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1124.802061] env[62522]: value = "task-2416220" [ 1124.802061] env[62522]: _type = "Task" [ 1124.802061] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.810531] env[62522]: DEBUG oslo_vmware.api [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416220, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.979316] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416219, 'name': CreateVM_Task, 'duration_secs': 0.377956} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.982817] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1124.983169] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416216, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.983919] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1124.984102] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1124.984435] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1124.984715] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ba6dcad-084f-4c5b-a48b-c6ba257fe2dc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.990978] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1124.990978] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d9f0d2-6ce9-9b8f-c8e4-58842d732bb5" [ 1124.990978] env[62522]: _type = "Task" [ 1124.990978] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.000199] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d9f0d2-6ce9-9b8f-c8e4-58842d732bb5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.044410] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "refresh_cache-7f8a8270-5014-446c-aa42-ea0b4079e5a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1125.044574] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquired lock "refresh_cache-7f8a8270-5014-446c-aa42-ea0b4079e5a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1125.044774] env[62522]: DEBUG nova.network.neutron [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1125.147034] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd2e00ee-f0a1-45f5-8a29-2fab00ddc33f tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Lock "981a4839-28d0-4d91-88cd-99c1d263ca4d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.310s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.156515] env[62522]: DEBUG nova.network.neutron [req-fab5d0b9-5ba4-4bdd-bf90-c23363e532fe req-1f1d5e48-427f-4266-9b31-bbe3f7fc0dc4 service nova] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Updated VIF entry in instance network info cache for port 58444651-b47b-44d5-b240-53949c79df86. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1125.156882] env[62522]: DEBUG nova.network.neutron [req-fab5d0b9-5ba4-4bdd-bf90-c23363e532fe req-1f1d5e48-427f-4266-9b31-bbe3f7fc0dc4 service nova] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Updating instance_info_cache with network_info: [{"id": "58444651-b47b-44d5-b240-53949c79df86", "address": "fa:16:3e:6c:81:68", "network": {"id": "fd993a8b-571c-4873-a5d6-4d8c60e23d38", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2084093882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071dd4c295a54e388099d5bf0f4e300b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58444651-b4", "ovs_interfaceid": "58444651-b47b-44d5-b240-53949c79df86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1125.312923] env[62522]: DEBUG oslo_vmware.api [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416220, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476796} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.313236] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 93a2505e-814d-4809-90a9-0bc215406efd/93a2505e-814d-4809-90a9-0bc215406efd.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1125.313428] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1125.313681] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0ff420d2-6453-44c3-bd1e-dbb106b3e1b0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.325138] env[62522]: DEBUG oslo_vmware.api [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1125.325138] env[62522]: value = "task-2416221" [ 1125.325138] env[62522]: _type = "Task" [ 1125.325138] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.333367] env[62522]: DEBUG oslo_vmware.api [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416221, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.476348] env[62522]: DEBUG oslo_vmware.api [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416216, 'name': PowerOnVM_Task, 'duration_secs': 1.261837} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.477040] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1125.501960] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d9f0d2-6ce9-9b8f-c8e4-58842d732bb5, 'name': SearchDatastore_Task, 'duration_secs': 0.057222} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.502346] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1125.502669] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1125.502943] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1125.503141] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1125.503366] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1125.503674] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-50100863-20e5-40ab-a91d-ac2d93be88b1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.515107] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1125.515107] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1125.516159] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27697d27-506a-4b58-8643-75a33bbb428d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.524100] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1125.524100] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bee460-7a04-5e0b-fa4e-637b1ee10762" [ 1125.524100] env[62522]: _type = "Task" [ 1125.524100] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.532766] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bee460-7a04-5e0b-fa4e-637b1ee10762, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.599968] env[62522]: DEBUG nova.compute.manager [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1125.600972] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae81ff9-05a1-4d7f-b6da-113930695123 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.661800] env[62522]: DEBUG oslo_concurrency.lockutils [req-fab5d0b9-5ba4-4bdd-bf90-c23363e532fe req-1f1d5e48-427f-4266-9b31-bbe3f7fc0dc4 service nova] Releasing lock "refresh_cache-b31195c2-29f4-475c-baa7-fcb4791b7278" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1125.711542] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1839b79-b8fe-4328-9c14-b22df379afd7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.721546] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d29a1e31-488c-4462-b381-4c7df14d0058 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.755063] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad918df6-92a7-4b98-9c8d-e0a23f9a9159 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.763024] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-795f4ba8-9448-4617-b738-21b4cb4d7e91 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.776781] env[62522]: DEBUG nova.compute.provider_tree [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 
tempest-AttachVolumeTestJSON-1462924921-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1125.832547] env[62522]: DEBUG oslo_vmware.api [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416221, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.218995} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.832773] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1125.833570] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd0c43f-bfed-49ac-8f93-35887138a2b6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.855224] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] 93a2505e-814d-4809-90a9-0bc215406efd/93a2505e-814d-4809-90a9-0bc215406efd.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1125.858644] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e08fcae5-26c4-4f4f-8727-8227eca68798 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.879272] env[62522]: DEBUG oslo_vmware.api [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1125.879272] env[62522]: value = "task-2416222" [ 1125.879272] env[62522]: _type = "Task" [ 1125.879272] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.888193] env[62522]: DEBUG oslo_vmware.api [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416222, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.015894] env[62522]: DEBUG nova.network.neutron [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Updating instance_info_cache with network_info: [{"id": "661819ce-17f6-47b5-a704-1c8c43e50373", "address": "fa:16:3e:60:76:34", "network": {"id": "2a4f6f01-ad28-4f8f-b835-ea86e1791f49", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1577320995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82346c440c3343a0a5c233a48203a13c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap661819ce-17", "ovs_interfaceid": "661819ce-17f6-47b5-a704-1c8c43e50373", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.038387] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52bee460-7a04-5e0b-fa4e-637b1ee10762, 'name': SearchDatastore_Task, 'duration_secs': 0.009734} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.039316] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1154e1ae-1c96-4ee7-95b7-f8a389c0ed6f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.046364] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1126.046364] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522557b9-16e7-1304-2246-5448fcf75166" [ 1126.046364] env[62522]: _type = "Task" [ 1126.046364] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.056244] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522557b9-16e7-1304-2246-5448fcf75166, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.127998] env[62522]: DEBUG oslo_concurrency.lockutils [None req-35551cca-b937-4add-a0f2-9f7faa839e1e tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 38.221s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.191439] env[62522]: DEBUG nova.compute.manager [req-633343c8-d665-4378-9811-cdc80ae37d46 req-9a5caa01-ac1d-40c1-882f-8c2e7cbe1891 service nova] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Received event network-changed-608eb061-5051-4459-a45a-6359abaf3221 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1126.191439] env[62522]: DEBUG nova.compute.manager [req-633343c8-d665-4378-9811-cdc80ae37d46 req-9a5caa01-ac1d-40c1-882f-8c2e7cbe1891 service nova] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Refreshing instance network info cache due to event network-changed-608eb061-5051-4459-a45a-6359abaf3221. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1126.191439] env[62522]: DEBUG oslo_concurrency.lockutils [req-633343c8-d665-4378-9811-cdc80ae37d46 req-9a5caa01-ac1d-40c1-882f-8c2e7cbe1891 service nova] Acquiring lock "refresh_cache-981a4839-28d0-4d91-88cd-99c1d263ca4d" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1126.191439] env[62522]: DEBUG oslo_concurrency.lockutils [req-633343c8-d665-4378-9811-cdc80ae37d46 req-9a5caa01-ac1d-40c1-882f-8c2e7cbe1891 service nova] Acquired lock "refresh_cache-981a4839-28d0-4d91-88cd-99c1d263ca4d" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.191439] env[62522]: DEBUG nova.network.neutron [req-633343c8-d665-4378-9811-cdc80ae37d46 req-9a5caa01-ac1d-40c1-882f-8c2e7cbe1891 service nova] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Refreshing network info cache for port 608eb061-5051-4459-a45a-6359abaf3221 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1126.280242] env[62522]: DEBUG nova.scheduler.client.report [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1126.390223] env[62522]: DEBUG oslo_vmware.api [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416222, 'name': ReconfigVM_Task, 'duration_secs': 0.298814} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.390223] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Reconfigured VM instance instance-00000062 to attach disk [datastore2] 93a2505e-814d-4809-90a9-0bc215406efd/93a2505e-814d-4809-90a9-0bc215406efd.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1126.390549] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-728e508d-a728-47c2-ad4d-5c2ce7e6832d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.399702] env[62522]: DEBUG oslo_vmware.api [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1126.399702] env[62522]: value = "task-2416223" [ 1126.399702] env[62522]: _type = "Task" [ 1126.399702] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.408029] env[62522]: DEBUG oslo_vmware.api [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416223, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.519529] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Releasing lock "refresh_cache-7f8a8270-5014-446c-aa42-ea0b4079e5a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1126.556910] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522557b9-16e7-1304-2246-5448fcf75166, 'name': SearchDatastore_Task, 'duration_secs': 0.010865} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.557185] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1126.557447] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] b31195c2-29f4-475c-baa7-fcb4791b7278/b31195c2-29f4-475c-baa7-fcb4791b7278.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1126.557705] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee7689eb-88fd-4189-a9bf-72f0fcdbfdf4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.563977] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1126.563977] env[62522]: value = "task-2416224" [ 1126.563977] env[62522]: _type = "Task" [ 1126.563977] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.571962] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416224, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.787402] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.259s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.788018] env[62522]: DEBUG nova.compute.manager [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1126.791042] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.811s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.791042] env[62522]: DEBUG nova.objects.instance [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lazy-loading 'resources' on Instance uuid 5426087f-3dd0-4796-aa46-6020a3bda4f5 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1126.909649] env[62522]: DEBUG oslo_vmware.api [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416223, 'name': Rename_Task, 'duration_secs': 0.153459} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.910126] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1126.910233] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-13fd89e4-3548-408f-b588-3c448ddd1725 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.918045] env[62522]: DEBUG oslo_vmware.api [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1126.918045] env[62522]: value = "task-2416225" [ 1126.918045] env[62522]: _type = "Task" [ 1126.918045] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.931872] env[62522]: DEBUG oslo_vmware.api [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416225, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.966342] env[62522]: DEBUG nova.network.neutron [req-633343c8-d665-4378-9811-cdc80ae37d46 req-9a5caa01-ac1d-40c1-882f-8c2e7cbe1891 service nova] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Updated VIF entry in instance network info cache for port 608eb061-5051-4459-a45a-6359abaf3221. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1126.966782] env[62522]: DEBUG nova.network.neutron [req-633343c8-d665-4378-9811-cdc80ae37d46 req-9a5caa01-ac1d-40c1-882f-8c2e7cbe1891 service nova] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Updating instance_info_cache with network_info: [{"id": "608eb061-5051-4459-a45a-6359abaf3221", "address": "fa:16:3e:5c:25:a1", "network": {"id": "f04169bc-47cf-4a4e-9c24-43bce1d74613", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1139239981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2a0f07d07c841ddbab81783fcf577e9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c20f5114-0866-45b3-9a7c-62f113ff83fa", "external-id": "nsx-vlan-transportzone-47", "segmentation_id": 47, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap608eb061-50", "ovs_interfaceid": "608eb061-5051-4459-a45a-6359abaf3221", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1127.075283] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416224, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.472052} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.075561] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] b31195c2-29f4-475c-baa7-fcb4791b7278/b31195c2-29f4-475c-baa7-fcb4791b7278.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1127.075780] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1127.076045] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1aab3100-bc83-4c09-b44f-5bb4d66887c4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.083617] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1127.083617] env[62522]: value = "task-2416226" [ 1127.083617] env[62522]: _type = "Task" [ 1127.083617] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.090728] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416226, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.295486] env[62522]: DEBUG nova.compute.utils [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1127.299824] env[62522]: DEBUG nova.compute.manager [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1127.300020] env[62522]: DEBUG nova.network.neutron [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1127.341331] env[62522]: DEBUG nova.policy [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f23d9647c4874ea0b3e6a6abf9d6202f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '686854cd52ce4809a4d315275260da54', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1127.431395] env[62522]: DEBUG oslo_vmware.api [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416225, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.458344] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b493d02e-7791-423c-87b6-ed0cd261f91a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.465746] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa0cc13f-b85e-4654-87dd-8f4df38d8e52 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.469214] env[62522]: DEBUG oslo_concurrency.lockutils [req-633343c8-d665-4378-9811-cdc80ae37d46 req-9a5caa01-ac1d-40c1-882f-8c2e7cbe1891 service nova] Releasing lock "refresh_cache-981a4839-28d0-4d91-88cd-99c1d263ca4d" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1127.497403] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a78a1ff-f3c7-40a9-8117-192e45a7d21c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.505296] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa4b1cf9-f08e-4c81-b8a7-1145f05b5577 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.519660] env[62522]: DEBUG nova.compute.provider_tree [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1127.593247] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': 
task-2416226, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063545} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.596876] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1127.596876] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b0299e4-28cc-492b-9c24-c379e826ba18 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.618436] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] b31195c2-29f4-475c-baa7-fcb4791b7278/b31195c2-29f4-475c-baa7-fcb4791b7278.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1127.618679] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-764cb26f-3852-4af8-b3b5-75f042da6891 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.640513] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1127.640513] env[62522]: value = "task-2416227" [ 1127.640513] env[62522]: _type = "Task" [ 1127.640513] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.654630] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416227, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.655708] env[62522]: DEBUG nova.network.neutron [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Successfully created port: 2d7b03e9-5319-496c-b990-7663aa7aa371 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1127.800418] env[62522]: DEBUG nova.compute.manager [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1127.927614] env[62522]: DEBUG oslo_vmware.api [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416225, 'name': PowerOnVM_Task, 'duration_secs': 0.526142} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.927888] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1127.928153] env[62522]: INFO nova.compute.manager [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Took 7.82 seconds to spawn the instance on the hypervisor. [ 1127.928348] env[62522]: DEBUG nova.compute.manager [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1127.929088] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e212031c-9180-47fb-b62a-ef54acb8da29 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.023917] env[62522]: DEBUG nova.scheduler.client.report [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1128.034760] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43d8d41f-f4df-4d83-af20-78b1a5ebb92e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.054014] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Updating instance '7f8a8270-5014-446c-aa42-ea0b4079e5a9' progress to 0 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1128.152987] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416227, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.447441] env[62522]: INFO nova.compute.manager [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Took 20.21 seconds to build instance. 
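[Annotation] The repeated "Waiting for the task", "_poll_task ... progress is N%" and "completed successfully" entries above (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) come from oslo.vmware's task-polling helper. A minimal, hypothetical sketch of that pattern follows; it assumes an already-established oslo_vmware.api.VMwareAPISession named `session` and an existing VM managed-object reference `vm_ref` (both names are illustrative, not taken from this log):

def power_on_and_wait(session, vm_ref):
    # Start the asynchronous vCenter task. invoke_api() issues the SOAP call
    # that appears in the log as "Invoking VirtualMachine.PowerOnVM_Task".
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the task state at the session's task_poll_interval;
    # each poll produces one "progress is N%" DEBUG line, and the call returns
    # the final task info once the task completes (raising if it failed).
    return session.wait_for_task(task_ref)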
[ 1128.528547] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.737s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1128.530955] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.912s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1128.532610] env[62522]: INFO nova.compute.claims [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1128.559704] env[62522]: INFO nova.scheduler.client.report [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Deleted allocations for instance 5426087f-3dd0-4796-aa46-6020a3bda4f5 [ 1128.563701] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1128.565026] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cc6fe8fa-e967-4901-9c2f-16faaebb492d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.578989] env[62522]: DEBUG oslo_vmware.api [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1128.578989] env[62522]: value = "task-2416228" [ 1128.578989] env[62522]: _type = "Task" [ 1128.578989] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.589758] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] VM already powered off {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1128.589957] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Updating instance '7f8a8270-5014-446c-aa42-ea0b4079e5a9' progress to 17 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1128.653032] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416227, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.809793] env[62522]: DEBUG nova.compute.manager [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1128.835014] env[62522]: DEBUG nova.virt.hardware [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1128.835285] env[62522]: DEBUG nova.virt.hardware [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1128.835447] env[62522]: DEBUG nova.virt.hardware [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1128.835649] env[62522]: DEBUG nova.virt.hardware [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Flavor pref 0:0:0 {{(pid=62522) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1128.835824] env[62522]: DEBUG nova.virt.hardware [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1128.835978] env[62522]: DEBUG nova.virt.hardware [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1128.836205] env[62522]: DEBUG nova.virt.hardware [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1128.836365] env[62522]: DEBUG nova.virt.hardware [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1128.836531] env[62522]: DEBUG nova.virt.hardware [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1128.836691] env[62522]: DEBUG nova.virt.hardware [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1128.836870] env[62522]: DEBUG nova.virt.hardware [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1128.837722] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1110bc41-7870-46a2-939e-df0ff1a12e73 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.845910] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7501864b-c9b2-4220-8575-7d730169868c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.948260] env[62522]: DEBUG oslo_concurrency.lockutils [None req-59329c40-cb58-4b91-9292-c922997ce687 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "93a2505e-814d-4809-90a9-0bc215406efd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.726s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1129.028956] env[62522]: 
DEBUG nova.compute.manager [req-fd05d690-e1d6-4243-9cd5-ad34f5e0e457 req-63575cd3-4633-4540-ac2c-91c5bb06f156 service nova] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Received event network-vif-plugged-2d7b03e9-5319-496c-b990-7663aa7aa371 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1129.029102] env[62522]: DEBUG oslo_concurrency.lockutils [req-fd05d690-e1d6-4243-9cd5-ad34f5e0e457 req-63575cd3-4633-4540-ac2c-91c5bb06f156 service nova] Acquiring lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1129.029324] env[62522]: DEBUG oslo_concurrency.lockutils [req-fd05d690-e1d6-4243-9cd5-ad34f5e0e457 req-63575cd3-4633-4540-ac2c-91c5bb06f156 service nova] Lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1129.029750] env[62522]: DEBUG oslo_concurrency.lockutils [req-fd05d690-e1d6-4243-9cd5-ad34f5e0e457 req-63575cd3-4633-4540-ac2c-91c5bb06f156 service nova] Lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1129.029914] env[62522]: DEBUG nova.compute.manager [req-fd05d690-e1d6-4243-9cd5-ad34f5e0e457 req-63575cd3-4633-4540-ac2c-91c5bb06f156 service nova] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] No waiting events found dispatching network-vif-plugged-2d7b03e9-5319-496c-b990-7663aa7aa371 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1129.030239] env[62522]: WARNING nova.compute.manager [req-fd05d690-e1d6-4243-9cd5-ad34f5e0e457 req-63575cd3-4633-4540-ac2c-91c5bb06f156 service nova] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Received unexpected event network-vif-plugged-2d7b03e9-5319-496c-b990-7663aa7aa371 for instance with vm_state building and task_state spawning. 
[ 1129.070517] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e2e922f6-87b0-4c66-b804-dc81e10b8897 tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "5426087f-3dd0-4796-aa46-6020a3bda4f5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.148s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1129.095965] env[62522]: DEBUG nova.virt.hardware [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:04Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1129.096213] env[62522]: DEBUG nova.virt.hardware [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1129.096485] env[62522]: DEBUG nova.virt.hardware [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1129.096568] env[62522]: DEBUG nova.virt.hardware [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1129.096689] env[62522]: DEBUG nova.virt.hardware [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1129.096837] env[62522]: DEBUG nova.virt.hardware [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1129.097087] env[62522]: DEBUG nova.virt.hardware [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1129.097271] env[62522]: DEBUG nova.virt.hardware [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 
tempest-ServerActionsTestOtherB-610600767-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1129.097446] env[62522]: DEBUG nova.virt.hardware [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1129.097609] env[62522]: DEBUG nova.virt.hardware [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1129.097784] env[62522]: DEBUG nova.virt.hardware [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1129.103221] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9789429d-1314-49a9-8b44-ebcc976b368b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.120329] env[62522]: DEBUG oslo_vmware.api [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1129.120329] env[62522]: value = "task-2416229" [ 1129.120329] env[62522]: _type = "Task" [ 1129.120329] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.131710] env[62522]: DEBUG oslo_vmware.api [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416229, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.153101] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416227, 'name': ReconfigVM_Task, 'duration_secs': 1.035213} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.153433] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Reconfigured VM instance instance-00000063 to attach disk [datastore2] b31195c2-29f4-475c-baa7-fcb4791b7278/b31195c2-29f4-475c-baa7-fcb4791b7278.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1129.154262] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-78d081e7-b2b0-4ad0-8f72-d9c541feed57 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.161326] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1129.161326] env[62522]: value = "task-2416230" [ 1129.161326] env[62522]: _type = "Task" [ 1129.161326] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.172623] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416230, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.326481] env[62522]: DEBUG nova.network.neutron [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Successfully updated port: 2d7b03e9-5319-496c-b990-7663aa7aa371 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1129.632042] env[62522]: DEBUG oslo_vmware.api [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416229, 'name': ReconfigVM_Task, 'duration_secs': 0.126857} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.632385] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Updating instance '7f8a8270-5014-446c-aa42-ea0b4079e5a9' progress to 33 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1129.673171] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416230, 'name': Rename_Task, 'duration_secs': 0.143856} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.673461] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1129.673738] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa8da276-f857-447b-a29f-f0b104e74c95 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.678804] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df0a111-6482-4ea4-a790-33670f941a45 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.682177] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1129.682177] env[62522]: value = "task-2416231" [ 1129.682177] env[62522]: _type = "Task" [ 1129.682177] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.690697] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d922e1b7-2a5e-44cf-a381-3e0300dec548 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.696971] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416231, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.726158] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084c6603-37f3-4817-9723-891dc32b964e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.733623] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9bc7d05-9ef3-4004-8e14-f7d26c5bab3c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.747413] env[62522]: DEBUG nova.compute.provider_tree [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1129.830797] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "refresh_cache-f3894644-eb7e-4a6d-9029-4cd30466d6f8" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1129.830863] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquired lock "refresh_cache-f3894644-eb7e-4a6d-9029-4cd30466d6f8" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.831040] env[62522]: DEBUG nova.network.neutron [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1129.870923] env[62522]: DEBUG nova.compute.manager [req-ea7a32c2-2f65-49d4-b06c-538a9f3da37d req-f401b981-ddb4-4b10-b5ca-eb41bc0aa83f service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Received event network-changed-55c5c37a-1605-4edb-957e-04160d41ff01 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1129.871521] env[62522]: DEBUG nova.compute.manager [req-ea7a32c2-2f65-49d4-b06c-538a9f3da37d req-f401b981-ddb4-4b10-b5ca-eb41bc0aa83f service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Refreshing instance network info cache due to event network-changed-55c5c37a-1605-4edb-957e-04160d41ff01. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1129.871777] env[62522]: DEBUG oslo_concurrency.lockutils [req-ea7a32c2-2f65-49d4-b06c-538a9f3da37d req-f401b981-ddb4-4b10-b5ca-eb41bc0aa83f service nova] Acquiring lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1129.871927] env[62522]: DEBUG oslo_concurrency.lockutils [req-ea7a32c2-2f65-49d4-b06c-538a9f3da37d req-f401b981-ddb4-4b10-b5ca-eb41bc0aa83f service nova] Acquired lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.872111] env[62522]: DEBUG nova.network.neutron [req-ea7a32c2-2f65-49d4-b06c-538a9f3da37d req-f401b981-ddb4-4b10-b5ca-eb41bc0aa83f service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Refreshing network info cache for port 55c5c37a-1605-4edb-957e-04160d41ff01 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1130.144174] env[62522]: DEBUG nova.virt.hardware [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1130.144174] env[62522]: DEBUG nova.virt.hardware [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1130.144174] env[62522]: DEBUG nova.virt.hardware [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1130.144174] env[62522]: DEBUG nova.virt.hardware [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1130.148057] env[62522]: DEBUG nova.virt.hardware [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1130.148057] env[62522]: DEBUG nova.virt.hardware [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1130.148057] env[62522]: DEBUG nova.virt.hardware [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1130.148057] env[62522]: DEBUG nova.virt.hardware [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1130.148057] env[62522]: DEBUG nova.virt.hardware [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1130.148302] env[62522]: DEBUG nova.virt.hardware [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1130.148335] env[62522]: DEBUG nova.virt.hardware [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1130.154462] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Reconfiguring VM instance instance-0000004d to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1130.155808] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-401bb3e5-b284-4b0b-bb1f-2df68b3c3777 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.177398] env[62522]: DEBUG oslo_vmware.api [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1130.177398] env[62522]: value = "task-2416232" [ 1130.177398] env[62522]: _type = "Task" [ 1130.177398] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.195715] env[62522]: DEBUG oslo_vmware.api [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416232, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.200175] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416231, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.250668] env[62522]: DEBUG nova.scheduler.client.report [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1130.372266] env[62522]: DEBUG nova.network.neutron [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1130.575281] env[62522]: DEBUG nova.network.neutron [req-ea7a32c2-2f65-49d4-b06c-538a9f3da37d req-f401b981-ddb4-4b10-b5ca-eb41bc0aa83f service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Updated VIF entry in instance network info cache for port 55c5c37a-1605-4edb-957e-04160d41ff01. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1130.575664] env[62522]: DEBUG nova.network.neutron [req-ea7a32c2-2f65-49d4-b06c-538a9f3da37d req-f401b981-ddb4-4b10-b5ca-eb41bc0aa83f service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Updating instance_info_cache with network_info: [{"id": "55c5c37a-1605-4edb-957e-04160d41ff01", "address": "fa:16:3e:07:85:b9", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c5c37a-16", "ovs_interfaceid": "55c5c37a-1605-4edb-957e-04160d41ff01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.616721] env[62522]: DEBUG nova.network.neutron [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Updating instance_info_cache with network_info: [{"id": "2d7b03e9-5319-496c-b990-7663aa7aa371", "address": "fa:16:3e:8a:f2:43", "network": {"id": "c3450427-ea7e-4a07-8399-53265d390e06", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1613138323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "686854cd52ce4809a4d315275260da54", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c42bb08a-77b4-4bba-8166-702cbb1b5f1e", "external-id": "nsx-vlan-transportzone-137", "segmentation_id": 137, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d7b03e9-53", "ovs_interfaceid": "2d7b03e9-5319-496c-b990-7663aa7aa371", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.619678] env[62522]: DEBUG oslo_concurrency.lockutils [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "ebca687d-4de7-4fd6-99fb-b4f0154abe9c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.619908] env[62522]: DEBUG oslo_concurrency.lockutils [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "ebca687d-4de7-4fd6-99fb-b4f0154abe9c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1130.620133] env[62522]: DEBUG oslo_concurrency.lockutils [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "ebca687d-4de7-4fd6-99fb-b4f0154abe9c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.620320] env[62522]: DEBUG oslo_concurrency.lockutils [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "ebca687d-4de7-4fd6-99fb-b4f0154abe9c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1130.620520] env[62522]: DEBUG oslo_concurrency.lockutils [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "ebca687d-4de7-4fd6-99fb-b4f0154abe9c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1130.622770] env[62522]: INFO nova.compute.manager [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Terminating instance [ 1130.690682] env[62522]: DEBUG oslo_vmware.api [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416232, 'name': ReconfigVM_Task, 'duration_secs': 0.172208} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.691355] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Reconfigured VM instance instance-0000004d to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1130.692138] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c63fc0f4-a833-4b27-b17a-025727551686 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.697501] env[62522]: DEBUG oslo_vmware.api [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416231, 'name': PowerOnVM_Task, 'duration_secs': 0.551474} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.699016] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1130.699016] env[62522]: INFO nova.compute.manager [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Took 8.25 seconds to spawn the instance on the hypervisor. [ 1130.699016] env[62522]: DEBUG nova.compute.manager [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1130.699231] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51628f0e-092c-42ca-a696-b4fe7a5f0075 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.725466] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 7f8a8270-5014-446c-aa42-ea0b4079e5a9/7f8a8270-5014-446c-aa42-ea0b4079e5a9.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1130.725466] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b366b02e-870e-439b-a76c-dcedb3e69451 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.746043] env[62522]: DEBUG oslo_vmware.api [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1130.746043] env[62522]: value = "task-2416233" [ 1130.746043] env[62522]: _type = "Task" [ 1130.746043] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.754111] env[62522]: DEBUG oslo_vmware.api [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416233, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.756248] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.225s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1130.756533] env[62522]: DEBUG nova.compute.manager [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1130.759348] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 11.381s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1130.759348] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1130.759348] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62522) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1130.760572] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4911aa0b-2922-41fe-8a76-f3c7c2ccb4b4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.768312] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0be2b64-b1ff-48d7-9a02-dbe482d97458 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.787943] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff5ab583-b818-4cca-949e-c51e36190c15 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.796815] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-016f7a39-739a-4809-91cc-2174a0050de9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.830037] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180544MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=62522) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1130.830202] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.830427] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1131.060190] env[62522]: DEBUG nova.compute.manager [req-fd19627a-e4f2-4b11-a426-12b2d390648e req-f4704f7a-87f6-4fe7-92b3-3ff54f85d26f service nova] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Received event network-changed-2d7b03e9-5319-496c-b990-7663aa7aa371 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1131.060411] env[62522]: DEBUG nova.compute.manager [req-fd19627a-e4f2-4b11-a426-12b2d390648e req-f4704f7a-87f6-4fe7-92b3-3ff54f85d26f service nova] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Refreshing instance network info cache due to event network-changed-2d7b03e9-5319-496c-b990-7663aa7aa371. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1131.060625] env[62522]: DEBUG oslo_concurrency.lockutils [req-fd19627a-e4f2-4b11-a426-12b2d390648e req-f4704f7a-87f6-4fe7-92b3-3ff54f85d26f service nova] Acquiring lock "refresh_cache-f3894644-eb7e-4a6d-9029-4cd30466d6f8" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1131.081540] env[62522]: DEBUG oslo_concurrency.lockutils [req-ea7a32c2-2f65-49d4-b06c-538a9f3da37d req-f401b981-ddb4-4b10-b5ca-eb41bc0aa83f service nova] Releasing lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1131.119918] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Releasing lock "refresh_cache-f3894644-eb7e-4a6d-9029-4cd30466d6f8" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1131.120296] env[62522]: DEBUG nova.compute.manager [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Instance network_info: |[{"id": "2d7b03e9-5319-496c-b990-7663aa7aa371", "address": "fa:16:3e:8a:f2:43", "network": {"id": "c3450427-ea7e-4a07-8399-53265d390e06", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1613138323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "686854cd52ce4809a4d315275260da54", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c42bb08a-77b4-4bba-8166-702cbb1b5f1e", "external-id": "nsx-vlan-transportzone-137", "segmentation_id": 137, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d7b03e9-53", 
"ovs_interfaceid": "2d7b03e9-5319-496c-b990-7663aa7aa371", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1131.120658] env[62522]: DEBUG oslo_concurrency.lockutils [req-fd19627a-e4f2-4b11-a426-12b2d390648e req-f4704f7a-87f6-4fe7-92b3-3ff54f85d26f service nova] Acquired lock "refresh_cache-f3894644-eb7e-4a6d-9029-4cd30466d6f8" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.120824] env[62522]: DEBUG nova.network.neutron [req-fd19627a-e4f2-4b11-a426-12b2d390648e req-f4704f7a-87f6-4fe7-92b3-3ff54f85d26f service nova] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Refreshing network info cache for port 2d7b03e9-5319-496c-b990-7663aa7aa371 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1131.122116] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:f2:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c42bb08a-77b4-4bba-8166-702cbb1b5f1e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2d7b03e9-5319-496c-b990-7663aa7aa371', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1131.129657] env[62522]: DEBUG oslo.service.loopingcall [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1131.131026] env[62522]: DEBUG nova.compute.manager [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1131.132033] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1131.132221] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1131.133089] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15108189-a570-41d8-bcc5-2d90c0bdf37a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.139657] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f374c906-2870-4321-b5d9-cc75912d5ab5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.164383] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1131.165903] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d00b8a2-73c2-4853-ac9f-6afb111d2f3c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.168235] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1131.168235] env[62522]: value = "task-2416234" [ 1131.168235] env[62522]: _type = "Task" [ 1131.168235] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.178779] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416234, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.180124] env[62522]: DEBUG oslo_vmware.api [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1131.180124] env[62522]: value = "task-2416235" [ 1131.180124] env[62522]: _type = "Task" [ 1131.180124] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.189574] env[62522]: DEBUG oslo_vmware.api [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416235, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.251430] env[62522]: INFO nova.compute.manager [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Took 19.06 seconds to build instance. 
[ 1131.258258] env[62522]: DEBUG oslo_vmware.api [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416233, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.261645] env[62522]: DEBUG nova.compute.utils [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1131.263054] env[62522]: DEBUG nova.compute.manager [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1131.263327] env[62522]: DEBUG nova.network.neutron [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1131.341264] env[62522]: DEBUG nova.policy [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f26eeb125397426baca60d80d635c4b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a06421250694a98b13ff34ad816dc75', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1131.678936] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416234, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.688771] env[62522]: DEBUG oslo_vmware.api [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416235, 'name': PowerOffVM_Task, 'duration_secs': 0.253877} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.688906] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1131.689123] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1131.689214] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-576c33fb-c800-49d1-9288-e3d594fb06ff {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.752812] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1131.753148] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1131.753273] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Deleting the datastore file [datastore1] ebca687d-4de7-4fd6-99fb-b4f0154abe9c {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1131.754039] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e59f1f78-fff0-435f-a3b0-c45a117c2d55 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "b31195c2-29f4-475c-baa7-fcb4791b7278" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.591s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1131.754268] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0806b679-e580-4297-9e3b-0267afb1726b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.761599] env[62522]: DEBUG oslo_vmware.api [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416233, 'name': ReconfigVM_Task, 'duration_secs': 0.661032} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.762488] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 7f8a8270-5014-446c-aa42-ea0b4079e5a9/7f8a8270-5014-446c-aa42-ea0b4079e5a9.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1131.762796] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Updating instance '7f8a8270-5014-446c-aa42-ea0b4079e5a9' progress to 50 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1131.767494] env[62522]: DEBUG nova.compute.manager [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1131.769912] env[62522]: DEBUG oslo_vmware.api [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for the task: (returnval){ [ 1131.769912] env[62522]: value = "task-2416237" [ 1131.769912] env[62522]: _type = "Task" [ 1131.769912] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.778270] env[62522]: DEBUG oslo_vmware.api [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416237, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.842164] env[62522]: DEBUG nova.network.neutron [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Successfully created port: 7eafd593-e029-4a97-afc2-234f1dd50f20 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1131.844644] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Applying migration context for instance 7f8a8270-5014-446c-aa42-ea0b4079e5a9 as it has an incoming, in-progress migration 2b15f686-afa5-4f5f-a5cd-77d737183ec3. 
Migration status is migrating {{(pid=62522) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1131.845927] env[62522]: INFO nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Updating resource usage from migration 2b15f686-afa5-4f5f-a5cd-77d737183ec3 [ 1131.849314] env[62522]: DEBUG nova.network.neutron [req-fd19627a-e4f2-4b11-a426-12b2d390648e req-f4704f7a-87f6-4fe7-92b3-3ff54f85d26f service nova] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Updated VIF entry in instance network info cache for port 2d7b03e9-5319-496c-b990-7663aa7aa371. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1131.849654] env[62522]: DEBUG nova.network.neutron [req-fd19627a-e4f2-4b11-a426-12b2d390648e req-f4704f7a-87f6-4fe7-92b3-3ff54f85d26f service nova] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Updating instance_info_cache with network_info: [{"id": "2d7b03e9-5319-496c-b990-7663aa7aa371", "address": "fa:16:3e:8a:f2:43", "network": {"id": "c3450427-ea7e-4a07-8399-53265d390e06", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1613138323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "686854cd52ce4809a4d315275260da54", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c42bb08a-77b4-4bba-8166-702cbb1b5f1e", "external-id": "nsx-vlan-transportzone-137", "segmentation_id": 137, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d7b03e9-53", "ovs_interfaceid": "2d7b03e9-5319-496c-b990-7663aa7aa371", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.871635] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance ebca687d-4de7-4fd6-99fb-b4f0154abe9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1131.871806] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance c28d2907-5b59-4df8-91a8-4ba0f2047d89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1131.871934] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 892926ef-3044-497c-8fc8-30cd298e4311 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1131.872073] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 981a4839-28d0-4d91-88cd-99c1d263ca4d actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1131.872197] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 93a2505e-814d-4809-90a9-0bc215406efd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1131.872316] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance b31195c2-29f4-475c-baa7-fcb4791b7278 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1131.872433] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Migration 2b15f686-afa5-4f5f-a5cd-77d737183ec3 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1131.872628] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 7f8a8270-5014-446c-aa42-ea0b4079e5a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1131.872773] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance f3894644-eb7e-4a6d-9029-4cd30466d6f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1131.872894] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 80dd48b7-09fb-4127-af11-b2d52a49ca12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1131.873111] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1131.873343] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2496MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1131.902585] env[62522]: DEBUG nova.compute.manager [req-b3197bf0-0d31-4018-a3e0-558878dc7741 req-217f509b-315c-4c5f-8094-33822e8c019e service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Received event network-changed-43d86dfd-5c95-438b-808b-91ab1078323b {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1131.902817] env[62522]: DEBUG nova.compute.manager [req-b3197bf0-0d31-4018-a3e0-558878dc7741 req-217f509b-315c-4c5f-8094-33822e8c019e service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Refreshing instance network info cache due to event network-changed-43d86dfd-5c95-438b-808b-91ab1078323b. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1131.903038] env[62522]: DEBUG oslo_concurrency.lockutils [req-b3197bf0-0d31-4018-a3e0-558878dc7741 req-217f509b-315c-4c5f-8094-33822e8c019e service nova] Acquiring lock "refresh_cache-93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1131.903186] env[62522]: DEBUG oslo_concurrency.lockutils [req-b3197bf0-0d31-4018-a3e0-558878dc7741 req-217f509b-315c-4c5f-8094-33822e8c019e service nova] Acquired lock "refresh_cache-93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.903347] env[62522]: DEBUG nova.network.neutron [req-b3197bf0-0d31-4018-a3e0-558878dc7741 req-217f509b-315c-4c5f-8094-33822e8c019e service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Refreshing network info cache for port 43d86dfd-5c95-438b-808b-91ab1078323b {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1132.034635] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae341b4-b779-4607-8043-584cc9aeb690 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.042620] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd10fc67-009f-4d19-9d4b-e94b16fb0c01 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.076187] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8940e9ad-ff24-4c61-b11c-b329eb8c2962 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.084008] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1334024-a37a-4c9e-a2e0-7943f9ce9dd9 {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.098783] env[62522]: DEBUG nova.compute.provider_tree [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1132.178918] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416234, 'name': CreateVM_Task, 'duration_secs': 0.884254} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.179126] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1132.180120] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1132.180120] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.180392] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1132.180648] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b19ef740-5330-4272-ba09-958f06a12bb2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.185323] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1132.185323] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529931e5-84ea-868a-6f1c-b58cf0e847c9" [ 1132.185323] env[62522]: _type = "Task" [ 1132.185323] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.193403] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529931e5-84ea-868a-6f1c-b58cf0e847c9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.276482] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb8f0a9-4257-4743-a10b-d9bc26305870 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.287881] env[62522]: DEBUG oslo_vmware.api [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416237, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.307020] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-617abc14-392d-4d0a-beb5-ac4737c4faf2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.327016] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Updating instance '7f8a8270-5014-446c-aa42-ea0b4079e5a9' progress to 67 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1132.352941] env[62522]: DEBUG oslo_concurrency.lockutils [req-fd19627a-e4f2-4b11-a426-12b2d390648e req-f4704f7a-87f6-4fe7-92b3-3ff54f85d26f service nova] Releasing lock "refresh_cache-f3894644-eb7e-4a6d-9029-4cd30466d6f8" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1132.606063] env[62522]: DEBUG nova.scheduler.client.report [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1132.673965] env[62522]: DEBUG nova.network.neutron [req-b3197bf0-0d31-4018-a3e0-558878dc7741 req-217f509b-315c-4c5f-8094-33822e8c019e service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Updated VIF entry in instance network info cache for port 43d86dfd-5c95-438b-808b-91ab1078323b. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1132.674361] env[62522]: DEBUG nova.network.neutron [req-b3197bf0-0d31-4018-a3e0-558878dc7741 req-217f509b-315c-4c5f-8094-33822e8c019e service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Updating instance_info_cache with network_info: [{"id": "43d86dfd-5c95-438b-808b-91ab1078323b", "address": "fa:16:3e:ee:37:0f", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43d86dfd-5c", "ovs_interfaceid": "43d86dfd-5c95-438b-808b-91ab1078323b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.696588] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529931e5-84ea-868a-6f1c-b58cf0e847c9, 'name': SearchDatastore_Task, 'duration_secs': 0.022217} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.696870] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1132.697113] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1132.697343] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1132.697488] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.697666] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1132.697922] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9a076b62-a69b-446b-995e-19ac73ff14ba {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.709342] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1132.709528] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1132.710248] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eed9c1cc-97f4-445d-9340-2bd972fb043a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.715019] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1132.715019] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521c3f80-eaaa-2faf-e3c9-b8ceb8580156" [ 1132.715019] env[62522]: _type = "Task" [ 1132.715019] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.723500] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521c3f80-eaaa-2faf-e3c9-b8ceb8580156, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.779726] env[62522]: DEBUG nova.compute.manager [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1132.790351] env[62522]: DEBUG oslo_vmware.api [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Task: {'id': task-2416237, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.528334} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.790697] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1132.790888] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1132.791074] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1132.791252] env[62522]: INFO nova.compute.manager [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Took 1.66 seconds to destroy the instance on the hypervisor. 
[ 1132.791493] env[62522]: DEBUG oslo.service.loopingcall [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1132.791686] env[62522]: DEBUG nova.compute.manager [-] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1132.791783] env[62522]: DEBUG nova.network.neutron [-] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1132.808474] env[62522]: DEBUG nova.virt.hardware [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1132.809209] env[62522]: DEBUG nova.virt.hardware [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1132.809209] env[62522]: DEBUG nova.virt.hardware [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1132.809209] env[62522]: DEBUG nova.virt.hardware [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1132.809374] env[62522]: DEBUG nova.virt.hardware [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1132.809438] env[62522]: DEBUG nova.virt.hardware [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1132.809702] env[62522]: DEBUG 
nova.virt.hardware [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1132.809883] env[62522]: DEBUG nova.virt.hardware [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1132.810078] env[62522]: DEBUG nova.virt.hardware [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1132.810317] env[62522]: DEBUG nova.virt.hardware [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1132.810629] env[62522]: DEBUG nova.virt.hardware [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1132.811471] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a5bed3-e302-41b9-94ab-3ceff92052ff {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.819902] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c282f7-1627-4cc1-b81c-75b705c90a86 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.894534] env[62522]: DEBUG nova.network.neutron [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Port 661819ce-17f6-47b5-a704-1c8c43e50373 binding to destination host cpu-1 is already ACTIVE {{(pid=62522) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1133.099173] env[62522]: DEBUG nova.compute.manager [req-069080ce-3f46-4288-a345-61ec616ec857 req-9d607149-c925-4223-86a8-2542b46ce4f0 service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Received event network-changed-43d86dfd-5c95-438b-808b-91ab1078323b {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1133.099173] env[62522]: DEBUG nova.compute.manager [req-069080ce-3f46-4288-a345-61ec616ec857 req-9d607149-c925-4223-86a8-2542b46ce4f0 service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Refreshing instance network info cache due to event network-changed-43d86dfd-5c95-438b-808b-91ab1078323b. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1133.099277] env[62522]: DEBUG oslo_concurrency.lockutils [req-069080ce-3f46-4288-a345-61ec616ec857 req-9d607149-c925-4223-86a8-2542b46ce4f0 service nova] Acquiring lock "refresh_cache-93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1133.111247] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62522) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1133.111338] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.281s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.177214] env[62522]: DEBUG oslo_concurrency.lockutils [req-b3197bf0-0d31-4018-a3e0-558878dc7741 req-217f509b-315c-4c5f-8094-33822e8c019e service nova] Releasing lock "refresh_cache-93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1133.177966] env[62522]: DEBUG oslo_concurrency.lockutils [req-069080ce-3f46-4288-a345-61ec616ec857 req-9d607149-c925-4223-86a8-2542b46ce4f0 service nova] Acquired lock "refresh_cache-93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.177966] env[62522]: DEBUG nova.network.neutron [req-069080ce-3f46-4288-a345-61ec616ec857 req-9d607149-c925-4223-86a8-2542b46ce4f0 service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Refreshing network info cache for port 43d86dfd-5c95-438b-808b-91ab1078323b {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1133.228068] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521c3f80-eaaa-2faf-e3c9-b8ceb8580156, 'name': SearchDatastore_Task, 'duration_secs': 0.04147} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.229395] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e231671-fa9a-4b47-aca2-54094f7d7f3a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.235387] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1133.235387] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c14b83-f97f-ef29-97fd-bd7091d02d0c" [ 1133.235387] env[62522]: _type = "Task" [ 1133.235387] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.243115] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c14b83-f97f-ef29-97fd-bd7091d02d0c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.336083] env[62522]: DEBUG nova.network.neutron [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Successfully updated port: 7eafd593-e029-4a97-afc2-234f1dd50f20 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1133.542957] env[62522]: DEBUG nova.network.neutron [-] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1133.745275] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c14b83-f97f-ef29-97fd-bd7091d02d0c, 'name': SearchDatastore_Task, 'duration_secs': 0.044418} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.745539] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1133.745807] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] f3894644-eb7e-4a6d-9029-4cd30466d6f8/f3894644-eb7e-4a6d-9029-4cd30466d6f8.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1133.746085] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b542965e-d35e-48cf-a31d-b95c82731e78 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.753256] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1133.753256] env[62522]: value = "task-2416238" [ 1133.753256] env[62522]: _type = "Task" [ 1133.753256] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.763388] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416238, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.839502] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "refresh_cache-80dd48b7-09fb-4127-af11-b2d52a49ca12" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1133.839777] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired lock "refresh_cache-80dd48b7-09fb-4127-af11-b2d52a49ca12" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.839880] env[62522]: DEBUG nova.network.neutron [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1133.887911] env[62522]: DEBUG nova.network.neutron [req-069080ce-3f46-4288-a345-61ec616ec857 req-9d607149-c925-4223-86a8-2542b46ce4f0 service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Updated VIF entry in instance network info cache for port 43d86dfd-5c95-438b-808b-91ab1078323b. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1133.888299] env[62522]: DEBUG nova.network.neutron [req-069080ce-3f46-4288-a345-61ec616ec857 req-9d607149-c925-4223-86a8-2542b46ce4f0 service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Updating instance_info_cache with network_info: [{"id": "43d86dfd-5c95-438b-808b-91ab1078323b", "address": "fa:16:3e:ee:37:0f", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43d86dfd-5c", "ovs_interfaceid": "43d86dfd-5c95-438b-808b-91ab1078323b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1133.912365] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "7f8a8270-5014-446c-aa42-ea0b4079e5a9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.912613] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "7f8a8270-5014-446c-aa42-ea0b4079e5a9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.912802] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "7f8a8270-5014-446c-aa42-ea0b4079e5a9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.929039] env[62522]: DEBUG nova.compute.manager [req-0dfe81a2-cae2-4d26-8b4b-132a0e2ab4ba req-abbf16c5-bedc-401b-a788-a0761e4d2b1e service nova] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Received event network-vif-deleted-9f1e209f-6bc2-4b96-9c5d-830ee01139b8 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1134.045602] env[62522]: INFO nova.compute.manager [-] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Took 1.25 seconds to deallocate network for instance. 
[ 1134.263291] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416238, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.370042] env[62522]: DEBUG nova.network.neutron [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1134.391149] env[62522]: DEBUG oslo_concurrency.lockutils [req-069080ce-3f46-4288-a345-61ec616ec857 req-9d607149-c925-4223-86a8-2542b46ce4f0 service nova] Releasing lock "refresh_cache-93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1134.391484] env[62522]: DEBUG nova.compute.manager [req-069080ce-3f46-4288-a345-61ec616ec857 req-9d607149-c925-4223-86a8-2542b46ce4f0 service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Received event network-changed-55c5c37a-1605-4edb-957e-04160d41ff01 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1134.391668] env[62522]: DEBUG nova.compute.manager [req-069080ce-3f46-4288-a345-61ec616ec857 req-9d607149-c925-4223-86a8-2542b46ce4f0 service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Refreshing instance network info cache due to event network-changed-55c5c37a-1605-4edb-957e-04160d41ff01. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1134.391887] env[62522]: DEBUG oslo_concurrency.lockutils [req-069080ce-3f46-4288-a345-61ec616ec857 req-9d607149-c925-4223-86a8-2542b46ce4f0 service nova] Acquiring lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1134.392045] env[62522]: DEBUG oslo_concurrency.lockutils [req-069080ce-3f46-4288-a345-61ec616ec857 req-9d607149-c925-4223-86a8-2542b46ce4f0 service nova] Acquired lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.392217] env[62522]: DEBUG nova.network.neutron [req-069080ce-3f46-4288-a345-61ec616ec857 req-9d607149-c925-4223-86a8-2542b46ce4f0 service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Refreshing network info cache for port 55c5c37a-1605-4edb-957e-04160d41ff01 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1134.501771] env[62522]: DEBUG nova.network.neutron [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Updating instance_info_cache with network_info: [{"id": "7eafd593-e029-4a97-afc2-234f1dd50f20", "address": "fa:16:3e:33:2b:5e", "network": {"id": "21d0ca22-5509-4f9b-b167-f9fde2dac808", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-547933771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": 
{}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a06421250694a98b13ff34ad816dc75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7eafd593-e0", "ovs_interfaceid": "7eafd593-e029-4a97-afc2-234f1dd50f20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.552142] env[62522]: DEBUG oslo_concurrency.lockutils [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1134.552498] env[62522]: DEBUG oslo_concurrency.lockutils [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.552844] env[62522]: DEBUG nova.objects.instance [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lazy-loading 'resources' on Instance uuid ebca687d-4de7-4fd6-99fb-b4f0154abe9c {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1134.763802] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416238, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.955788] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "refresh_cache-7f8a8270-5014-446c-aa42-ea0b4079e5a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1134.956123] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquired lock "refresh_cache-7f8a8270-5014-446c-aa42-ea0b4079e5a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.956180] env[62522]: DEBUG nova.network.neutron [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1135.005143] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Releasing lock "refresh_cache-80dd48b7-09fb-4127-af11-b2d52a49ca12" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1135.005446] env[62522]: DEBUG nova.compute.manager [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Instance network_info: |[{"id": "7eafd593-e029-4a97-afc2-234f1dd50f20", "address": "fa:16:3e:33:2b:5e", "network": {"id": "21d0ca22-5509-4f9b-b167-f9fde2dac808", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-547933771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a06421250694a98b13ff34ad816dc75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7eafd593-e0", "ovs_interfaceid": "7eafd593-e029-4a97-afc2-234f1dd50f20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1135.005851] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:2b:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'24144f5a-050a-4f1e-8d8c-774dc16dc791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7eafd593-e029-4a97-afc2-234f1dd50f20', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1135.013622] env[62522]: DEBUG oslo.service.loopingcall [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1135.013832] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1135.014526] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2711a4f5-86b2-4dea-accf-281b4fb68cb5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.036922] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1135.036922] env[62522]: value = "task-2416239" [ 1135.036922] env[62522]: _type = "Task" [ 1135.036922] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.044422] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416239, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.123812] env[62522]: DEBUG nova.compute.manager [req-a92f48e3-b5f9-4f32-88c9-a4fcb3e67b68 req-7a43877a-e83f-40ea-95b5-88c7ebc49109 service nova] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Received event network-vif-plugged-7eafd593-e029-4a97-afc2-234f1dd50f20 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1135.124070] env[62522]: DEBUG oslo_concurrency.lockutils [req-a92f48e3-b5f9-4f32-88c9-a4fcb3e67b68 req-7a43877a-e83f-40ea-95b5-88c7ebc49109 service nova] Acquiring lock "80dd48b7-09fb-4127-af11-b2d52a49ca12-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1135.124289] env[62522]: DEBUG oslo_concurrency.lockutils [req-a92f48e3-b5f9-4f32-88c9-a4fcb3e67b68 req-7a43877a-e83f-40ea-95b5-88c7ebc49109 service nova] Lock "80dd48b7-09fb-4127-af11-b2d52a49ca12-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1135.124462] env[62522]: DEBUG oslo_concurrency.lockutils [req-a92f48e3-b5f9-4f32-88c9-a4fcb3e67b68 req-7a43877a-e83f-40ea-95b5-88c7ebc49109 service nova] Lock "80dd48b7-09fb-4127-af11-b2d52a49ca12-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1135.124741] env[62522]: DEBUG nova.compute.manager [req-a92f48e3-b5f9-4f32-88c9-a4fcb3e67b68 req-7a43877a-e83f-40ea-95b5-88c7ebc49109 service nova] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] No waiting events found dispatching network-vif-plugged-7eafd593-e029-4a97-afc2-234f1dd50f20 {{(pid=62522) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1135.124799] env[62522]: WARNING nova.compute.manager [req-a92f48e3-b5f9-4f32-88c9-a4fcb3e67b68 req-7a43877a-e83f-40ea-95b5-88c7ebc49109 service nova] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Received unexpected event network-vif-plugged-7eafd593-e029-4a97-afc2-234f1dd50f20 for instance with vm_state building and task_state spawning. [ 1135.124960] env[62522]: DEBUG nova.compute.manager [req-a92f48e3-b5f9-4f32-88c9-a4fcb3e67b68 req-7a43877a-e83f-40ea-95b5-88c7ebc49109 service nova] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Received event network-changed-7eafd593-e029-4a97-afc2-234f1dd50f20 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1135.125135] env[62522]: DEBUG nova.compute.manager [req-a92f48e3-b5f9-4f32-88c9-a4fcb3e67b68 req-7a43877a-e83f-40ea-95b5-88c7ebc49109 service nova] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Refreshing instance network info cache due to event network-changed-7eafd593-e029-4a97-afc2-234f1dd50f20. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1135.125319] env[62522]: DEBUG oslo_concurrency.lockutils [req-a92f48e3-b5f9-4f32-88c9-a4fcb3e67b68 req-7a43877a-e83f-40ea-95b5-88c7ebc49109 service nova] Acquiring lock "refresh_cache-80dd48b7-09fb-4127-af11-b2d52a49ca12" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1135.125454] env[62522]: DEBUG oslo_concurrency.lockutils [req-a92f48e3-b5f9-4f32-88c9-a4fcb3e67b68 req-7a43877a-e83f-40ea-95b5-88c7ebc49109 service nova] Acquired lock "refresh_cache-80dd48b7-09fb-4127-af11-b2d52a49ca12" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.125610] env[62522]: DEBUG nova.network.neutron [req-a92f48e3-b5f9-4f32-88c9-a4fcb3e67b68 req-7a43877a-e83f-40ea-95b5-88c7ebc49109 service nova] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Refreshing network info cache for port 7eafd593-e029-4a97-afc2-234f1dd50f20 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1135.130202] env[62522]: DEBUG nova.network.neutron [req-069080ce-3f46-4288-a345-61ec616ec857 req-9d607149-c925-4223-86a8-2542b46ce4f0 service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Updated VIF entry in instance network info cache for port 55c5c37a-1605-4edb-957e-04160d41ff01. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1135.130595] env[62522]: DEBUG nova.network.neutron [req-069080ce-3f46-4288-a345-61ec616ec857 req-9d607149-c925-4223-86a8-2542b46ce4f0 service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Updating instance_info_cache with network_info: [{"id": "55c5c37a-1605-4edb-957e-04160d41ff01", "address": "fa:16:3e:07:85:b9", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c5c37a-16", "ovs_interfaceid": "55c5c37a-1605-4edb-957e-04160d41ff01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.197960] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a14bef12-5031-48eb-8f0d-7574f5d72f51 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.205733] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea11c2f-4f19-4639-8991-e5192a3db5fb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.237262] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6fb33e-4d28-4a42-b4bf-cc4ee758b976 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.244851] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6cd23f-d795-474d-9a2c-601d3a2ec42e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.258282] env[62522]: DEBUG nova.compute.provider_tree [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1135.267379] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416238, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.071448} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.268196] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] f3894644-eb7e-4a6d-9029-4cd30466d6f8/f3894644-eb7e-4a6d-9029-4cd30466d6f8.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1135.268415] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1135.268657] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a25f2f35-a652-4c33-ba69-85a65712b8ce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.275061] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1135.275061] env[62522]: value = "task-2416240" [ 1135.275061] env[62522]: _type = "Task" [ 1135.275061] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.286011] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416240, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.547297] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416239, 'name': CreateVM_Task} progress is 25%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.632741] env[62522]: DEBUG oslo_concurrency.lockutils [req-069080ce-3f46-4288-a345-61ec616ec857 req-9d607149-c925-4223-86a8-2542b46ce4f0 service nova] Releasing lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1135.633081] env[62522]: DEBUG nova.compute.manager [req-069080ce-3f46-4288-a345-61ec616ec857 req-9d607149-c925-4223-86a8-2542b46ce4f0 service nova] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Received event network-changed-58444651-b47b-44d5-b240-53949c79df86 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1135.633307] env[62522]: DEBUG nova.compute.manager [req-069080ce-3f46-4288-a345-61ec616ec857 req-9d607149-c925-4223-86a8-2542b46ce4f0 service nova] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Refreshing instance network info cache due to event network-changed-58444651-b47b-44d5-b240-53949c79df86. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1135.633487] env[62522]: DEBUG oslo_concurrency.lockutils [req-069080ce-3f46-4288-a345-61ec616ec857 req-9d607149-c925-4223-86a8-2542b46ce4f0 service nova] Acquiring lock "refresh_cache-b31195c2-29f4-475c-baa7-fcb4791b7278" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1135.634188] env[62522]: DEBUG oslo_concurrency.lockutils [req-069080ce-3f46-4288-a345-61ec616ec857 req-9d607149-c925-4223-86a8-2542b46ce4f0 service nova] Acquired lock "refresh_cache-b31195c2-29f4-475c-baa7-fcb4791b7278" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.635070] env[62522]: DEBUG nova.network.neutron [req-069080ce-3f46-4288-a345-61ec616ec857 req-9d607149-c925-4223-86a8-2542b46ce4f0 service nova] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Refreshing network info cache for port 58444651-b47b-44d5-b240-53949c79df86 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1135.676282] env[62522]: DEBUG nova.network.neutron [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Updating instance_info_cache with network_info: [{"id": "661819ce-17f6-47b5-a704-1c8c43e50373", "address": "fa:16:3e:60:76:34", "network": {"id": "2a4f6f01-ad28-4f8f-b835-ea86e1791f49", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1577320995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82346c440c3343a0a5c233a48203a13c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap661819ce-17", "ovs_interfaceid": "661819ce-17f6-47b5-a704-1c8c43e50373", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.763579] env[62522]: DEBUG nova.scheduler.client.report [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1135.785975] env[62522]: DEBUG oslo_vmware.api [None 
req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416240, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.119743} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.787107] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1135.787888] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd0613c-246a-426d-8994-43def28d5164 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.811557] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] f3894644-eb7e-4a6d-9029-4cd30466d6f8/f3894644-eb7e-4a6d-9029-4cd30466d6f8.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1135.814099] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dacf74db-78a6-40e7-b208-9c33b5a6be8f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.833737] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1135.833737] env[62522]: value = "task-2416241" [ 1135.833737] env[62522]: _type = "Task" [ 1135.833737] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.840947] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416241, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.859462] env[62522]: DEBUG nova.network.neutron [req-a92f48e3-b5f9-4f32-88c9-a4fcb3e67b68 req-7a43877a-e83f-40ea-95b5-88c7ebc49109 service nova] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Updated VIF entry in instance network info cache for port 7eafd593-e029-4a97-afc2-234f1dd50f20. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1135.859789] env[62522]: DEBUG nova.network.neutron [req-a92f48e3-b5f9-4f32-88c9-a4fcb3e67b68 req-7a43877a-e83f-40ea-95b5-88c7ebc49109 service nova] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Updating instance_info_cache with network_info: [{"id": "7eafd593-e029-4a97-afc2-234f1dd50f20", "address": "fa:16:3e:33:2b:5e", "network": {"id": "21d0ca22-5509-4f9b-b167-f9fde2dac808", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-547933771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a06421250694a98b13ff34ad816dc75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7eafd593-e0", "ovs_interfaceid": "7eafd593-e029-4a97-afc2-234f1dd50f20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1136.048157] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416239, 'name': CreateVM_Task, 'duration_secs': 0.68666} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.048494] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1136.049069] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1136.049260] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.049579] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1136.049861] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7bfc8a8-67c2-4dc9-aee7-d07f3bb4c08a {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.055244] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1136.055244] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d145a7-97ed-a7af-4083-ec9f5f063b4c" [ 1136.055244] env[62522]: _type = "Task" [ 1136.055244] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.062924] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d145a7-97ed-a7af-4083-ec9f5f063b4c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.178444] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Releasing lock "refresh_cache-7f8a8270-5014-446c-aa42-ea0b4079e5a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1136.272786] env[62522]: DEBUG oslo_concurrency.lockutils [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.720s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.290699] env[62522]: INFO nova.scheduler.client.report [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Deleted allocations for instance ebca687d-4de7-4fd6-99fb-b4f0154abe9c [ 1136.344605] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416241, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.362712] env[62522]: DEBUG oslo_concurrency.lockutils [req-a92f48e3-b5f9-4f32-88c9-a4fcb3e67b68 req-7a43877a-e83f-40ea-95b5-88c7ebc49109 service nova] Releasing lock "refresh_cache-80dd48b7-09fb-4127-af11-b2d52a49ca12" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1136.370718] env[62522]: DEBUG nova.network.neutron [req-069080ce-3f46-4288-a345-61ec616ec857 req-9d607149-c925-4223-86a8-2542b46ce4f0 service nova] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Updated VIF entry in instance network info cache for port 58444651-b47b-44d5-b240-53949c79df86. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1136.371155] env[62522]: DEBUG nova.network.neutron [req-069080ce-3f46-4288-a345-61ec616ec857 req-9d607149-c925-4223-86a8-2542b46ce4f0 service nova] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Updating instance_info_cache with network_info: [{"id": "58444651-b47b-44d5-b240-53949c79df86", "address": "fa:16:3e:6c:81:68", "network": {"id": "fd993a8b-571c-4873-a5d6-4d8c60e23d38", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2084093882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071dd4c295a54e388099d5bf0f4e300b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58444651-b4", "ovs_interfaceid": "58444651-b47b-44d5-b240-53949c79df86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1136.566046] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d145a7-97ed-a7af-4083-ec9f5f063b4c, 'name': SearchDatastore_Task, 'duration_secs': 0.008971} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.566230] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1136.566420] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1136.566661] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1136.566810] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.566989] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1136.567276] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8914b10f-8ec3-4df9-9161-0d54ff68e93b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.580256] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1136.580436] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1136.581229] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27483290-defe-41a4-8592-695a1417a121 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.586483] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1136.586483] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527d2e5c-da67-63ca-274c-fc051eb59385" [ 1136.586483] env[62522]: _type = "Task" [ 1136.586483] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.593838] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527d2e5c-da67-63ca-274c-fc051eb59385, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.702436] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c35c6194-d92f-4d1e-9f22-aace358d7a73 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.722091] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52fd6a11-dc64-49ea-9650-3126e36702b9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.729255] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Updating instance '7f8a8270-5014-446c-aa42-ea0b4079e5a9' progress to 83 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1136.799492] env[62522]: DEBUG oslo_concurrency.lockutils [None req-21afd1b6-27ed-44ff-8a96-360369256f0c tempest-ServersTestJSON-990685860 tempest-ServersTestJSON-990685860-project-member] Lock "ebca687d-4de7-4fd6-99fb-b4f0154abe9c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.179s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.845278] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416241, 'name': ReconfigVM_Task, 'duration_secs': 0.981396} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.845563] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Reconfigured VM instance instance-00000064 to attach disk [datastore2] f3894644-eb7e-4a6d-9029-4cd30466d6f8/f3894644-eb7e-4a6d-9029-4cd30466d6f8.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1136.846183] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ac7f3963-7f13-499c-b852-d4ccbd56da7e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.852648] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1136.852648] env[62522]: value = "task-2416242" [ 1136.852648] env[62522]: _type = "Task" [ 1136.852648] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.860169] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416242, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.873810] env[62522]: DEBUG oslo_concurrency.lockutils [req-069080ce-3f46-4288-a345-61ec616ec857 req-9d607149-c925-4223-86a8-2542b46ce4f0 service nova] Releasing lock "refresh_cache-b31195c2-29f4-475c-baa7-fcb4791b7278" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1137.097986] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527d2e5c-da67-63ca-274c-fc051eb59385, 'name': SearchDatastore_Task, 'duration_secs': 0.010025} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.098861] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd04d71f-411b-429a-9084-4ed7266b3f1e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.104130] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1137.104130] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524acebe-9001-975c-a396-7899eefa0915" [ 1137.104130] env[62522]: _type = "Task" [ 1137.104130] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.112816] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524acebe-9001-975c-a396-7899eefa0915, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.236102] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3bc9afa6-86eb-486f-8db1-970a9568592c tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Updating instance '7f8a8270-5014-446c-aa42-ea0b4079e5a9' progress to 100 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1137.363164] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416242, 'name': Rename_Task, 'duration_secs': 0.132717} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.363455] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1137.363734] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b70dcf33-4816-46f0-a9df-cb55e9997477 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.371139] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1137.371139] env[62522]: value = "task-2416243" [ 1137.371139] env[62522]: _type = "Task" [ 1137.371139] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.378627] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416243, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.433911] env[62522]: DEBUG oslo_concurrency.lockutils [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "interface-892926ef-3044-497c-8fc8-30cd298e4311-9371b30e-3fec-41e5-88af-f58ce423428e" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1137.435589] env[62522]: DEBUG oslo_concurrency.lockutils [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "interface-892926ef-3044-497c-8fc8-30cd298e4311-9371b30e-3fec-41e5-88af-f58ce423428e" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.435589] env[62522]: DEBUG nova.objects.instance [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lazy-loading 'flavor' on Instance uuid 892926ef-3044-497c-8fc8-30cd298e4311 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1137.616035] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524acebe-9001-975c-a396-7899eefa0915, 'name': SearchDatastore_Task, 'duration_secs': 0.013509} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.616035] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1137.616035] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 80dd48b7-09fb-4127-af11-b2d52a49ca12/80dd48b7-09fb-4127-af11-b2d52a49ca12.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1137.616035] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-faf9aa4a-6ff3-4521-b21a-d36ba63ebb19 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.622756] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1137.622756] env[62522]: value = "task-2416244" [ 1137.622756] env[62522]: _type = "Task" [ 1137.622756] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.631900] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416244, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.887464] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416243, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.113512] env[62522]: DEBUG nova.objects.instance [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lazy-loading 'pci_requests' on Instance uuid 892926ef-3044-497c-8fc8-30cd298e4311 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1138.133125] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416244, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.381690] env[62522]: DEBUG oslo_vmware.api [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416243, 'name': PowerOnVM_Task, 'duration_secs': 0.970725} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.381973] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1138.382195] env[62522]: INFO nova.compute.manager [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Took 9.57 seconds to spawn the instance on the hypervisor. 
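The records above (CreateVM_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task, each polled from "progress is 0%" through "completed successfully") follow the submit-then-poll task lifecycle that Nova's vmwareapi driver drives through oslo.vmware's wait_for_task/_poll_task machinery. The fragment below is a minimal sketch of that pattern only, not Nova's actual driver code; the vCenter host, credentials, and the way the VM reference is looked up are placeholders chosen for illustration.

    # Sketch: submit a vSphere task via oslo.vmware, then block until it completes.
    # Host and credentials are placeholders; task_poll_interval drives the periodic
    # "progress is N%" polling visible in the log above.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vc1.example.test', 'user', 'password',   # placeholders, not the real endpoint
        api_retry_count=10,
        task_poll_interval=0.5)

    # Look up one VirtualMachine managed-object reference (first result only);
    # a real driver would resolve the instance's moref from its own records.
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'VirtualMachine', 1)
    vm_ref = result.objects[0].obj

    # Submit the power-on task and wait for vSphere to report completion;
    # wait_for_task raises if the task ends in an error state.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)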
[ 1138.382378] env[62522]: DEBUG nova.compute.manager [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1138.383186] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab6bc46-2389-4c66-be71-87fdc267de4b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.615885] env[62522]: DEBUG nova.objects.base [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Object Instance<892926ef-3044-497c-8fc8-30cd298e4311> lazy-loaded attributes: flavor,pci_requests {{(pid=62522) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1138.616209] env[62522]: DEBUG nova.network.neutron [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1138.632851] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416244, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.713193] env[62522]: DEBUG nova.policy [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab5e5a8e6ee64aad8d52342ee3f5af36', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4bdd1f5caf09454d808bcdc15df2d3a7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1138.902405] env[62522]: INFO nova.compute.manager [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Took 25.54 seconds to build instance. [ 1139.134978] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416244, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.461376} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.135403] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 80dd48b7-09fb-4127-af11-b2d52a49ca12/80dd48b7-09fb-4127-af11-b2d52a49ca12.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1139.135537] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1139.137878] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c4fa924c-89b0-4978-b934-01c869b91aba {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.142670] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1139.142670] env[62522]: value = "task-2416245" [ 1139.142670] env[62522]: _type = "Task" [ 1139.142670] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.151869] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416245, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.403667] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a7ebf68-ce5e-4a1f-9735-136ab5a91f81 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.048s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1139.560721] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7c27c27b-1ebd-4094-95b6-06f0b600fb2f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "7f8a8270-5014-446c-aa42-ea0b4079e5a9" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1139.561726] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7c27c27b-1ebd-4094-95b6-06f0b600fb2f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "7f8a8270-5014-446c-aa42-ea0b4079e5a9" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1139.561726] env[62522]: DEBUG nova.compute.manager [None req-7c27c27b-1ebd-4094-95b6-06f0b600fb2f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Going to confirm migration 5 {{(pid=62522) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1139.653214] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416245, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06891} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.653476] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1139.654325] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c753df50-aa6d-4aae-8826-51315e17112b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.675876] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 80dd48b7-09fb-4127-af11-b2d52a49ca12/80dd48b7-09fb-4127-af11-b2d52a49ca12.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1139.676141] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9de1fb1a-ae83-4429-9c4f-46c070abe7c9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.697095] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1139.697095] env[62522]: value = "task-2416246" [ 1139.697095] env[62522]: _type = "Task" [ 1139.697095] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.710722] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416246, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.717384] env[62522]: DEBUG nova.compute.manager [req-60a12b96-edef-4e22-85f0-7c917aeabe2d req-1e2ff54b-805f-4a6b-bcb9-d0dc71667796 service nova] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Received event network-changed-2d7b03e9-5319-496c-b990-7663aa7aa371 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1139.717617] env[62522]: DEBUG nova.compute.manager [req-60a12b96-edef-4e22-85f0-7c917aeabe2d req-1e2ff54b-805f-4a6b-bcb9-d0dc71667796 service nova] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Refreshing instance network info cache due to event network-changed-2d7b03e9-5319-496c-b990-7663aa7aa371. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1139.717965] env[62522]: DEBUG oslo_concurrency.lockutils [req-60a12b96-edef-4e22-85f0-7c917aeabe2d req-1e2ff54b-805f-4a6b-bcb9-d0dc71667796 service nova] Acquiring lock "refresh_cache-f3894644-eb7e-4a6d-9029-4cd30466d6f8" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1139.718137] env[62522]: DEBUG oslo_concurrency.lockutils [req-60a12b96-edef-4e22-85f0-7c917aeabe2d req-1e2ff54b-805f-4a6b-bcb9-d0dc71667796 service nova] Acquired lock "refresh_cache-f3894644-eb7e-4a6d-9029-4cd30466d6f8" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.718232] env[62522]: DEBUG nova.network.neutron [req-60a12b96-edef-4e22-85f0-7c917aeabe2d req-1e2ff54b-805f-4a6b-bcb9-d0dc71667796 service nova] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Refreshing network info cache for port 2d7b03e9-5319-496c-b990-7663aa7aa371 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1140.124984] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7c27c27b-1ebd-4094-95b6-06f0b600fb2f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "refresh_cache-7f8a8270-5014-446c-aa42-ea0b4079e5a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1140.125237] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7c27c27b-1ebd-4094-95b6-06f0b600fb2f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquired lock "refresh_cache-7f8a8270-5014-446c-aa42-ea0b4079e5a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.125416] env[62522]: DEBUG nova.network.neutron [None req-7c27c27b-1ebd-4094-95b6-06f0b600fb2f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1140.125606] env[62522]: DEBUG nova.objects.instance [None req-7c27c27b-1ebd-4094-95b6-06f0b600fb2f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lazy-loading 'info_cache' on Instance uuid 7f8a8270-5014-446c-aa42-ea0b4079e5a9 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1140.210943] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416246, 'name': ReconfigVM_Task, 'duration_secs': 0.318647} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.211384] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 80dd48b7-09fb-4127-af11-b2d52a49ca12/80dd48b7-09fb-4127-af11-b2d52a49ca12.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1140.211879] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a9c7a126-448f-4b0b-8540-1bbd70322871 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.218358] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1140.218358] env[62522]: value = "task-2416247" [ 1140.218358] env[62522]: _type = "Task" [ 1140.218358] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.228977] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416247, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.235043] env[62522]: DEBUG nova.compute.manager [req-65290a05-f85c-41fd-917c-bc10de2a3b31 req-3f181331-b388-4386-8cc4-3e9cea70f821 service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Received event network-vif-plugged-9371b30e-3fec-41e5-88af-f58ce423428e {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1140.235262] env[62522]: DEBUG oslo_concurrency.lockutils [req-65290a05-f85c-41fd-917c-bc10de2a3b31 req-3f181331-b388-4386-8cc4-3e9cea70f821 service nova] Acquiring lock "892926ef-3044-497c-8fc8-30cd298e4311-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.235469] env[62522]: DEBUG oslo_concurrency.lockutils [req-65290a05-f85c-41fd-917c-bc10de2a3b31 req-3f181331-b388-4386-8cc4-3e9cea70f821 service nova] Lock "892926ef-3044-497c-8fc8-30cd298e4311-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.235635] env[62522]: DEBUG oslo_concurrency.lockutils [req-65290a05-f85c-41fd-917c-bc10de2a3b31 req-3f181331-b388-4386-8cc4-3e9cea70f821 service nova] Lock "892926ef-3044-497c-8fc8-30cd298e4311-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1140.235804] env[62522]: DEBUG nova.compute.manager [req-65290a05-f85c-41fd-917c-bc10de2a3b31 req-3f181331-b388-4386-8cc4-3e9cea70f821 service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] No waiting events found dispatching 
network-vif-plugged-9371b30e-3fec-41e5-88af-f58ce423428e {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1140.235976] env[62522]: WARNING nova.compute.manager [req-65290a05-f85c-41fd-917c-bc10de2a3b31 req-3f181331-b388-4386-8cc4-3e9cea70f821 service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Received unexpected event network-vif-plugged-9371b30e-3fec-41e5-88af-f58ce423428e for instance with vm_state active and task_state None. [ 1140.336325] env[62522]: DEBUG nova.network.neutron [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Successfully updated port: 9371b30e-3fec-41e5-88af-f58ce423428e {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1140.491379] env[62522]: DEBUG nova.network.neutron [req-60a12b96-edef-4e22-85f0-7c917aeabe2d req-1e2ff54b-805f-4a6b-bcb9-d0dc71667796 service nova] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Updated VIF entry in instance network info cache for port 2d7b03e9-5319-496c-b990-7663aa7aa371. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1140.491771] env[62522]: DEBUG nova.network.neutron [req-60a12b96-edef-4e22-85f0-7c917aeabe2d req-1e2ff54b-805f-4a6b-bcb9-d0dc71667796 service nova] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Updating instance_info_cache with network_info: [{"id": "2d7b03e9-5319-496c-b990-7663aa7aa371", "address": "fa:16:3e:8a:f2:43", "network": {"id": "c3450427-ea7e-4a07-8399-53265d390e06", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1613138323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "686854cd52ce4809a4d315275260da54", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c42bb08a-77b4-4bba-8166-702cbb1b5f1e", "external-id": "nsx-vlan-transportzone-137", "segmentation_id": 137, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d7b03e9-53", "ovs_interfaceid": "2d7b03e9-5319-496c-b990-7663aa7aa371", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1140.727890] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416247, 'name': Rename_Task, 'duration_secs': 0.158228} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.728099] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1140.728346] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-88a46179-92fa-4c68-b39a-e575c0f261e4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.733812] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1140.733812] env[62522]: value = "task-2416248" [ 1140.733812] env[62522]: _type = "Task" [ 1140.733812] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.741237] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416248, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.840680] env[62522]: DEBUG oslo_concurrency.lockutils [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1140.840905] env[62522]: DEBUG oslo_concurrency.lockutils [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.841122] env[62522]: DEBUG nova.network.neutron [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1140.994738] env[62522]: DEBUG oslo_concurrency.lockutils [req-60a12b96-edef-4e22-85f0-7c917aeabe2d req-1e2ff54b-805f-4a6b-bcb9-d0dc71667796 service nova] Releasing lock "refresh_cache-f3894644-eb7e-4a6d-9029-4cd30466d6f8" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1141.243738] env[62522]: DEBUG oslo_vmware.api [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416248, 'name': PowerOnVM_Task, 'duration_secs': 0.460969} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.244110] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1141.244233] env[62522]: INFO nova.compute.manager [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Took 8.46 seconds to spawn the instance on the hypervisor. [ 1141.244414] env[62522]: DEBUG nova.compute.manager [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1141.245316] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a08109-cb88-485e-a92f-4b05f714834f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.371071] env[62522]: DEBUG nova.network.neutron [None req-7c27c27b-1ebd-4094-95b6-06f0b600fb2f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Updating instance_info_cache with network_info: [{"id": "661819ce-17f6-47b5-a704-1c8c43e50373", "address": "fa:16:3e:60:76:34", "network": {"id": "2a4f6f01-ad28-4f8f-b835-ea86e1791f49", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1577320995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82346c440c3343a0a5c233a48203a13c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap661819ce-17", "ovs_interfaceid": "661819ce-17f6-47b5-a704-1c8c43e50373", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.374163] env[62522]: WARNING nova.network.neutron [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] be0fe686-4986-439e-aa82-5cbe54104c8a already exists in list: networks containing: ['be0fe686-4986-439e-aa82-5cbe54104c8a']. 
ignoring it [ 1141.685666] env[62522]: DEBUG nova.network.neutron [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Updating instance_info_cache with network_info: [{"id": "55c5c37a-1605-4edb-957e-04160d41ff01", "address": "fa:16:3e:07:85:b9", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c5c37a-16", "ovs_interfaceid": "55c5c37a-1605-4edb-957e-04160d41ff01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9371b30e-3fec-41e5-88af-f58ce423428e", "address": "fa:16:3e:15:9a:06", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9371b30e-3f", "ovs_interfaceid": "9371b30e-3fec-41e5-88af-f58ce423428e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.763030] env[62522]: INFO nova.compute.manager [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Took 25.16 seconds to build instance. 
[ 1141.873720] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7c27c27b-1ebd-4094-95b6-06f0b600fb2f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Releasing lock "refresh_cache-7f8a8270-5014-446c-aa42-ea0b4079e5a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1141.874026] env[62522]: DEBUG nova.objects.instance [None req-7c27c27b-1ebd-4094-95b6-06f0b600fb2f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lazy-loading 'migration_context' on Instance uuid 7f8a8270-5014-446c-aa42-ea0b4079e5a9 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1142.191267] env[62522]: DEBUG oslo_concurrency.lockutils [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1142.191930] env[62522]: DEBUG oslo_concurrency.lockutils [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1142.192104] env[62522]: DEBUG oslo_concurrency.lockutils [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.192995] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de00434-5b6a-479e-9ee2-0c1911048628 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.210279] env[62522]: DEBUG nova.virt.hardware [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1142.210516] env[62522]: DEBUG nova.virt.hardware [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1142.210704] env[62522]: DEBUG nova.virt.hardware [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Image limits 0:0:0 
{{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1142.210885] env[62522]: DEBUG nova.virt.hardware [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1142.211042] env[62522]: DEBUG nova.virt.hardware [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1142.211194] env[62522]: DEBUG nova.virt.hardware [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1142.211391] env[62522]: DEBUG nova.virt.hardware [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1142.211551] env[62522]: DEBUG nova.virt.hardware [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1142.211750] env[62522]: DEBUG nova.virt.hardware [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1142.211940] env[62522]: DEBUG nova.virt.hardware [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1142.212109] env[62522]: DEBUG nova.virt.hardware [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1142.218223] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Reconfiguring VM to attach interface {{(pid=62522) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1142.218508] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d186cf1-95d1-427d-bfed-63e329204eba {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.235359] env[62522]: DEBUG 
oslo_vmware.api [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1142.235359] env[62522]: value = "task-2416249" [ 1142.235359] env[62522]: _type = "Task" [ 1142.235359] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.242805] env[62522]: DEBUG oslo_vmware.api [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416249, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.263279] env[62522]: DEBUG nova.compute.manager [req-6110dcc7-108f-4f79-90f8-74e3160d08a9 req-1563ca3a-4e77-4748-8795-c9b8fa6dd995 service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Received event network-changed-9371b30e-3fec-41e5-88af-f58ce423428e {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1142.263594] env[62522]: DEBUG nova.compute.manager [req-6110dcc7-108f-4f79-90f8-74e3160d08a9 req-1563ca3a-4e77-4748-8795-c9b8fa6dd995 service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Refreshing instance network info cache due to event network-changed-9371b30e-3fec-41e5-88af-f58ce423428e. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1142.264079] env[62522]: DEBUG oslo_concurrency.lockutils [req-6110dcc7-108f-4f79-90f8-74e3160d08a9 req-1563ca3a-4e77-4748-8795-c9b8fa6dd995 service nova] Acquiring lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1142.264319] env[62522]: DEBUG oslo_concurrency.lockutils [req-6110dcc7-108f-4f79-90f8-74e3160d08a9 req-1563ca3a-4e77-4748-8795-c9b8fa6dd995 service nova] Acquired lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.264528] env[62522]: DEBUG nova.network.neutron [req-6110dcc7-108f-4f79-90f8-74e3160d08a9 req-1563ca3a-4e77-4748-8795-c9b8fa6dd995 service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Refreshing network info cache for port 9371b30e-3fec-41e5-88af-f58ce423428e {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1142.266250] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d9f38552-c927-4249-923a-c25086bfec65 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "80dd48b7-09fb-4127-af11-b2d52a49ca12" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.671s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.376890] env[62522]: DEBUG nova.objects.base [None req-7c27c27b-1ebd-4094-95b6-06f0b600fb2f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Object Instance<7f8a8270-5014-446c-aa42-ea0b4079e5a9> lazy-loaded attributes: info_cache,migration_context {{(pid=62522) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1142.377840] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2cd7e755-606b-426b-82de-92c2123b6051 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.397877] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a184cdb1-5f11-440f-88d4-18f3588a7475 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.405023] env[62522]: DEBUG oslo_vmware.api [None req-7c27c27b-1ebd-4094-95b6-06f0b600fb2f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1142.405023] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526ae756-c1d5-6281-c38b-0ee9118fc663" [ 1142.405023] env[62522]: _type = "Task" [ 1142.405023] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.412742] env[62522]: DEBUG oslo_vmware.api [None req-7c27c27b-1ebd-4094-95b6-06f0b600fb2f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526ae756-c1d5-6281-c38b-0ee9118fc663, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.745806] env[62522]: DEBUG oslo_vmware.api [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416249, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.918204] env[62522]: DEBUG oslo_vmware.api [None req-7c27c27b-1ebd-4094-95b6-06f0b600fb2f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526ae756-c1d5-6281-c38b-0ee9118fc663, 'name': SearchDatastore_Task, 'duration_secs': 0.007686} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.918510] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7c27c27b-1ebd-4094-95b6-06f0b600fb2f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.918732] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7c27c27b-1ebd-4094-95b6-06f0b600fb2f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1142.980373] env[62522]: DEBUG nova.network.neutron [req-6110dcc7-108f-4f79-90f8-74e3160d08a9 req-1563ca3a-4e77-4748-8795-c9b8fa6dd995 service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Updated VIF entry in instance network info cache for port 9371b30e-3fec-41e5-88af-f58ce423428e. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1142.980770] env[62522]: DEBUG nova.network.neutron [req-6110dcc7-108f-4f79-90f8-74e3160d08a9 req-1563ca3a-4e77-4748-8795-c9b8fa6dd995 service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Updating instance_info_cache with network_info: [{"id": "55c5c37a-1605-4edb-957e-04160d41ff01", "address": "fa:16:3e:07:85:b9", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c5c37a-16", "ovs_interfaceid": "55c5c37a-1605-4edb-957e-04160d41ff01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9371b30e-3fec-41e5-88af-f58ce423428e", "address": "fa:16:3e:15:9a:06", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9371b30e-3f", "ovs_interfaceid": "9371b30e-3fec-41e5-88af-f58ce423428e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.246169] env[62522]: DEBUG oslo_vmware.api [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416249, 'name': ReconfigVM_Task, 'duration_secs': 0.60778} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.246606] env[62522]: DEBUG oslo_concurrency.lockutils [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1143.246830] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Reconfigured VM to attach interface {{(pid=62522) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1143.267887] env[62522]: DEBUG oslo_concurrency.lockutils [None req-203d2a5b-b071-4793-9bf1-9d2b236c6477 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "80dd48b7-09fb-4127-af11-b2d52a49ca12" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1143.268174] env[62522]: DEBUG oslo_concurrency.lockutils [None req-203d2a5b-b071-4793-9bf1-9d2b236c6477 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "80dd48b7-09fb-4127-af11-b2d52a49ca12" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.483898] env[62522]: DEBUG oslo_concurrency.lockutils [req-6110dcc7-108f-4f79-90f8-74e3160d08a9 req-1563ca3a-4e77-4748-8795-c9b8fa6dd995 service nova] Releasing lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1143.538828] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ada7cd6-82bc-40f0-8047-5f6e7cdf4154 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.546574] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a6e335-19e7-4ecf-902b-27d7ddbc08d6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.576405] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e8aaa33-d3ab-41df-b8bd-843bef1e5344 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.584117] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e3ee78-ff9f-427c-9dd2-9ad305cb35b4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.596972] env[62522]: DEBUG nova.compute.provider_tree [None req-7c27c27b-1ebd-4094-95b6-06f0b600fb2f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
1143.751170] env[62522]: DEBUG oslo_concurrency.lockutils [None req-aebeab20-e69d-4008-82f2-c8ed98c95a10 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "interface-892926ef-3044-497c-8fc8-30cd298e4311-9371b30e-3fec-41e5-88af-f58ce423428e" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.317s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.770425] env[62522]: DEBUG nova.compute.utils [None req-203d2a5b-b071-4793-9bf1-9d2b236c6477 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1144.100466] env[62522]: DEBUG nova.scheduler.client.report [None req-7c27c27b-1ebd-4094-95b6-06f0b600fb2f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1144.273070] env[62522]: DEBUG oslo_concurrency.lockutils [None req-203d2a5b-b071-4793-9bf1-9d2b236c6477 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "80dd48b7-09fb-4127-af11-b2d52a49ca12" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.004s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1145.111037] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7c27c27b-1ebd-4094-95b6-06f0b600fb2f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.192s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1145.111313] env[62522]: DEBUG nova.compute.manager [None req-7c27c27b-1ebd-4094-95b6-06f0b600fb2f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=62522) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5376}} [ 1145.221581] env[62522]: DEBUG oslo_concurrency.lockutils [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "interface-892926ef-3044-497c-8fc8-30cd298e4311-9371b30e-3fec-41e5-88af-f58ce423428e" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1145.221846] env[62522]: DEBUG oslo_concurrency.lockutils [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "interface-892926ef-3044-497c-8fc8-30cd298e4311-9371b30e-3fec-41e5-88af-f58ce423428e" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1145.331088] env[62522]: DEBUG oslo_concurrency.lockutils [None req-203d2a5b-b071-4793-9bf1-9d2b236c6477 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "80dd48b7-09fb-4127-af11-b2d52a49ca12" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1145.331405] env[62522]: DEBUG oslo_concurrency.lockutils [None req-203d2a5b-b071-4793-9bf1-9d2b236c6477 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "80dd48b7-09fb-4127-af11-b2d52a49ca12" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1145.331612] env[62522]: INFO nova.compute.manager [None req-203d2a5b-b071-4793-9bf1-9d2b236c6477 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Attaching volume 664f0beb-c41a-4da2-899e-59fba71e5435 to /dev/sdb [ 1145.361731] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feceefa9-bfa1-4269-85d7-87440409f7b9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.369200] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ab7347-49f9-468b-bdea-413f8093bf55 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.381734] env[62522]: DEBUG nova.virt.block_device [None req-203d2a5b-b071-4793-9bf1-9d2b236c6477 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Updating existing volume attachment record: 9c065dbf-08b4-401c-9b0f-3f093663a244 {{(pid=62522) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1145.674362] env[62522]: INFO nova.scheduler.client.report [None req-7c27c27b-1ebd-4094-95b6-06f0b600fb2f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Deleted allocation for migration 2b15f686-afa5-4f5f-a5cd-77d737183ec3 [ 1145.724386] env[62522]: DEBUG 
oslo_concurrency.lockutils [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1145.724572] env[62522]: DEBUG oslo_concurrency.lockutils [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.725536] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57153020-96de-4102-9cb6-9c2d5aa733d7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.745952] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82011f5f-586f-4451-8d46-0974af3db37d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.773421] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Reconfiguring VM to detach interface {{(pid=62522) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1145.773777] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09eae36b-49de-449f-9d41-0a06b9b46d4d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.792359] env[62522]: DEBUG oslo_vmware.api [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1145.792359] env[62522]: value = "task-2416253" [ 1145.792359] env[62522]: _type = "Task" [ 1145.792359] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.800642] env[62522]: DEBUG oslo_vmware.api [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416253, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.180510] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7c27c27b-1ebd-4094-95b6-06f0b600fb2f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "7f8a8270-5014-446c-aa42-ea0b4079e5a9" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.619s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.302465] env[62522]: DEBUG oslo_vmware.api [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416253, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.438027] env[62522]: DEBUG nova.objects.instance [None req-c0f0839e-bd09-4b95-ac70-aa0b045c6d1d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lazy-loading 'flavor' on Instance uuid 7f8a8270-5014-446c-aa42-ea0b4079e5a9 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1146.803042] env[62522]: DEBUG oslo_vmware.api [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416253, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.943041] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c0f0839e-bd09-4b95-ac70-aa0b045c6d1d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "refresh_cache-7f8a8270-5014-446c-aa42-ea0b4079e5a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1146.943305] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c0f0839e-bd09-4b95-ac70-aa0b045c6d1d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquired lock "refresh_cache-7f8a8270-5014-446c-aa42-ea0b4079e5a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.943587] env[62522]: DEBUG nova.network.neutron [None req-c0f0839e-bd09-4b95-ac70-aa0b045c6d1d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1146.943917] env[62522]: DEBUG nova.objects.instance [None req-c0f0839e-bd09-4b95-ac70-aa0b045c6d1d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lazy-loading 'info_cache' on Instance uuid 7f8a8270-5014-446c-aa42-ea0b4079e5a9 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1147.304900] env[62522]: DEBUG oslo_vmware.api [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416253, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.447800] env[62522]: DEBUG nova.objects.base [None req-c0f0839e-bd09-4b95-ac70-aa0b045c6d1d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Object Instance<7f8a8270-5014-446c-aa42-ea0b4079e5a9> lazy-loaded attributes: flavor,info_cache {{(pid=62522) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1147.804831] env[62522]: DEBUG oslo_vmware.api [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416253, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.195886] env[62522]: DEBUG nova.network.neutron [None req-c0f0839e-bd09-4b95-ac70-aa0b045c6d1d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Updating instance_info_cache with network_info: [{"id": "661819ce-17f6-47b5-a704-1c8c43e50373", "address": "fa:16:3e:60:76:34", "network": {"id": "2a4f6f01-ad28-4f8f-b835-ea86e1791f49", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1577320995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82346c440c3343a0a5c233a48203a13c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap661819ce-17", "ovs_interfaceid": "661819ce-17f6-47b5-a704-1c8c43e50373", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1148.306057] env[62522]: DEBUG oslo_vmware.api [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416253, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.698720] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c0f0839e-bd09-4b95-ac70-aa0b045c6d1d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Releasing lock "refresh_cache-7f8a8270-5014-446c-aa42-ea0b4079e5a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1148.806574] env[62522]: DEBUG oslo_vmware.api [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416253, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.306570] env[62522]: DEBUG oslo_vmware.api [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416253, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.704365] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0f0839e-bd09-4b95-ac70-aa0b045c6d1d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1149.704697] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8dc6f5aa-1539-4efa-95d1-7eb276bfb8fb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.712191] env[62522]: DEBUG oslo_vmware.api [None req-c0f0839e-bd09-4b95-ac70-aa0b045c6d1d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1149.712191] env[62522]: value = "task-2416255" [ 1149.712191] env[62522]: _type = "Task" [ 1149.712191] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.719450] env[62522]: DEBUG oslo_vmware.api [None req-c0f0839e-bd09-4b95-ac70-aa0b045c6d1d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416255, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.807873] env[62522]: DEBUG oslo_vmware.api [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416253, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.925632] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-203d2a5b-b071-4793-9bf1-9d2b236c6477 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Volume attach. 
Driver type: vmdk {{(pid=62522) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1149.925883] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-203d2a5b-b071-4793-9bf1-9d2b236c6477 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489841', 'volume_id': '664f0beb-c41a-4da2-899e-59fba71e5435', 'name': 'volume-664f0beb-c41a-4da2-899e-59fba71e5435', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '80dd48b7-09fb-4127-af11-b2d52a49ca12', 'attached_at': '', 'detached_at': '', 'volume_id': '664f0beb-c41a-4da2-899e-59fba71e5435', 'serial': '664f0beb-c41a-4da2-899e-59fba71e5435'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1149.926790] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d56ed8b6-5e80-4702-ad1c-4e66afd80a0d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.943095] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb3988d-bb4a-4a55-ab3f-641f1384f482 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.966933] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-203d2a5b-b071-4793-9bf1-9d2b236c6477 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] volume-664f0beb-c41a-4da2-899e-59fba71e5435/volume-664f0beb-c41a-4da2-899e-59fba71e5435.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1149.967208] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de0448dc-96a6-49fb-98bf-e41018d2d9cf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.984447] env[62522]: DEBUG oslo_vmware.api [None req-203d2a5b-b071-4793-9bf1-9d2b236c6477 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1149.984447] env[62522]: value = "task-2416256" [ 1149.984447] env[62522]: _type = "Task" [ 1149.984447] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.992028] env[62522]: DEBUG oslo_vmware.api [None req-203d2a5b-b071-4793-9bf1-9d2b236c6477 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416256, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.222816] env[62522]: DEBUG oslo_vmware.api [None req-c0f0839e-bd09-4b95-ac70-aa0b045c6d1d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416255, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.307356] env[62522]: DEBUG oslo_vmware.api [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416253, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.495776] env[62522]: DEBUG oslo_vmware.api [None req-203d2a5b-b071-4793-9bf1-9d2b236c6477 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416256, 'name': ReconfigVM_Task, 'duration_secs': 0.347539} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.496010] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-203d2a5b-b071-4793-9bf1-9d2b236c6477 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Reconfigured VM instance instance-00000065 to attach disk [datastore1] volume-664f0beb-c41a-4da2-899e-59fba71e5435/volume-664f0beb-c41a-4da2-899e-59fba71e5435.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1150.500591] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a628dbb-fd59-44e6-88b1-743d0fffc8b5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.516885] env[62522]: DEBUG oslo_vmware.api [None req-203d2a5b-b071-4793-9bf1-9d2b236c6477 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1150.516885] env[62522]: value = "task-2416257" [ 1150.516885] env[62522]: _type = "Task" [ 1150.516885] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.524935] env[62522]: DEBUG oslo_vmware.api [None req-203d2a5b-b071-4793-9bf1-9d2b236c6477 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416257, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.723101] env[62522]: DEBUG oslo_vmware.api [None req-c0f0839e-bd09-4b95-ac70-aa0b045c6d1d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416255, 'name': PowerOnVM_Task, 'duration_secs': 0.542907} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.723482] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0f0839e-bd09-4b95-ac70-aa0b045c6d1d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1150.723571] env[62522]: DEBUG nova.compute.manager [None req-c0f0839e-bd09-4b95-ac70-aa0b045c6d1d tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1150.724326] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00457ea1-b9ab-4c10-ad93-5a5a2901bdd2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.807392] env[62522]: DEBUG oslo_vmware.api [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416253, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.027569] env[62522]: DEBUG oslo_vmware.api [None req-203d2a5b-b071-4793-9bf1-9d2b236c6477 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416257, 'name': ReconfigVM_Task, 'duration_secs': 0.134453} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.027882] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-203d2a5b-b071-4793-9bf1-9d2b236c6477 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489841', 'volume_id': '664f0beb-c41a-4da2-899e-59fba71e5435', 'name': 'volume-664f0beb-c41a-4da2-899e-59fba71e5435', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '80dd48b7-09fb-4127-af11-b2d52a49ca12', 'attached_at': '', 'detached_at': '', 'volume_id': '664f0beb-c41a-4da2-899e-59fba71e5435', 'serial': '664f0beb-c41a-4da2-899e-59fba71e5435'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1151.308783] env[62522]: DEBUG oslo_vmware.api [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416253, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.809733] env[62522]: DEBUG oslo_vmware.api [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416253, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.065513] env[62522]: DEBUG nova.objects.instance [None req-203d2a5b-b071-4793-9bf1-9d2b236c6477 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lazy-loading 'flavor' on Instance uuid 80dd48b7-09fb-4127-af11-b2d52a49ca12 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1152.135637] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "7f8a8270-5014-446c-aa42-ea0b4079e5a9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.135924] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "7f8a8270-5014-446c-aa42-ea0b4079e5a9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.136182] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "7f8a8270-5014-446c-aa42-ea0b4079e5a9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.136377] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "7f8a8270-5014-446c-aa42-ea0b4079e5a9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.136551] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "7f8a8270-5014-446c-aa42-ea0b4079e5a9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.139097] env[62522]: INFO nova.compute.manager [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Terminating instance [ 1152.311451] env[62522]: DEBUG oslo_vmware.api [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416253, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.570309] env[62522]: DEBUG oslo_concurrency.lockutils [None req-203d2a5b-b071-4793-9bf1-9d2b236c6477 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "80dd48b7-09fb-4127-af11-b2d52a49ca12" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.239s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.643280] env[62522]: DEBUG nova.compute.manager [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1152.643544] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1152.644475] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-520c9632-8ed3-4c36-8f1d-d85c38764863 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.652895] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1152.653141] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5b96908f-bded-40b7-98d5-7977d7ae1ea9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.659184] env[62522]: DEBUG oslo_vmware.api [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1152.659184] env[62522]: value = "task-2416258" [ 1152.659184] env[62522]: _type = "Task" [ 1152.659184] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.667229] env[62522]: DEBUG oslo_vmware.api [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416258, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.674827] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "80dd48b7-09fb-4127-af11-b2d52a49ca12" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.675126] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "80dd48b7-09fb-4127-af11-b2d52a49ca12" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.675413] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "80dd48b7-09fb-4127-af11-b2d52a49ca12-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.675657] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "80dd48b7-09fb-4127-af11-b2d52a49ca12-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.675882] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "80dd48b7-09fb-4127-af11-b2d52a49ca12-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.678443] env[62522]: INFO nova.compute.manager [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Terminating instance [ 1152.810550] env[62522]: DEBUG oslo_vmware.api [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416253, 'name': ReconfigVM_Task, 'duration_secs': 6.769345} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.810550] env[62522]: DEBUG oslo_concurrency.lockutils [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1152.811027] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Reconfigured VM to detach interface {{(pid=62522) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1153.169240] env[62522]: DEBUG oslo_vmware.api [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416258, 'name': PowerOffVM_Task, 'duration_secs': 0.176397} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.169500] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1153.169674] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1153.169948] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-66773d99-c0c9-478a-821e-90d8dcb6b576 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.182609] env[62522]: DEBUG nova.compute.manager [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1153.182849] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1153.183123] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e5a65724-f92e-40eb-a819-4366820ca6bc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.188932] env[62522]: DEBUG oslo_vmware.api [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1153.188932] env[62522]: value = "task-2416260" [ 1153.188932] env[62522]: _type = "Task" [ 1153.188932] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.196963] env[62522]: DEBUG oslo_vmware.api [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416260, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.698455] env[62522]: DEBUG oslo_vmware.api [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416260, 'name': PowerOffVM_Task, 'duration_secs': 0.207109} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.698705] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1153.698903] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Volume detach. 
Driver type: vmdk {{(pid=62522) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1153.699110] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489841', 'volume_id': '664f0beb-c41a-4da2-899e-59fba71e5435', 'name': 'volume-664f0beb-c41a-4da2-899e-59fba71e5435', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '80dd48b7-09fb-4127-af11-b2d52a49ca12', 'attached_at': '', 'detached_at': '', 'volume_id': '664f0beb-c41a-4da2-899e-59fba71e5435', 'serial': '664f0beb-c41a-4da2-899e-59fba71e5435'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1153.699868] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-974bf187-2134-4916-b57a-54daa33de2fd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.720248] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbbe6eb8-52fd-46da-a1e8-c95c876ceb3b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.726587] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa78fc8-ba75-4254-9612-93a03c8d8cb2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.748155] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bca485f-0820-4b10-b404-6c96c64f1584 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.762515] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] The volume has not been displaced from its original location: [datastore1] volume-664f0beb-c41a-4da2-899e-59fba71e5435/volume-664f0beb-c41a-4da2-899e-59fba71e5435.vmdk. No consolidation needed. 
{{(pid=62522) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1153.767660] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Reconfiguring VM instance instance-00000065 to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1153.767929] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf23317f-ac0c-4fdd-9382-5c95be158404 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.786093] env[62522]: DEBUG oslo_vmware.api [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1153.786093] env[62522]: value = "task-2416261" [ 1153.786093] env[62522]: _type = "Task" [ 1153.786093] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.793790] env[62522]: DEBUG oslo_vmware.api [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416261, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.107351] env[62522]: DEBUG oslo_concurrency.lockutils [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1154.107351] env[62522]: DEBUG oslo_concurrency.lockutils [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.107351] env[62522]: DEBUG nova.network.neutron [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1154.125496] env[62522]: DEBUG nova.compute.manager [req-d45e28f7-7b67-4797-8043-2375de7910cc req-e15ee3fc-60dc-4c62-bc10-299d16ceec3e service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Received event network-changed-55c5c37a-1605-4edb-957e-04160d41ff01 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1154.125496] env[62522]: DEBUG nova.compute.manager [req-d45e28f7-7b67-4797-8043-2375de7910cc req-e15ee3fc-60dc-4c62-bc10-299d16ceec3e service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Refreshing instance network info cache due to event network-changed-55c5c37a-1605-4edb-957e-04160d41ff01. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1154.125496] env[62522]: DEBUG oslo_concurrency.lockutils [req-d45e28f7-7b67-4797-8043-2375de7910cc req-e15ee3fc-60dc-4c62-bc10-299d16ceec3e service nova] Acquiring lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1154.296162] env[62522]: DEBUG oslo_vmware.api [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416261, 'name': ReconfigVM_Task, 'duration_secs': 0.200656} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.296440] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Reconfigured VM instance instance-00000065 to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1154.301126] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80fb0886-7c07-4b1b-9f11-d2cb26feafc8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.319908] env[62522]: DEBUG oslo_vmware.api [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1154.319908] env[62522]: value = "task-2416262" [ 1154.319908] env[62522]: _type = "Task" [ 1154.319908] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.328571] env[62522]: DEBUG oslo_vmware.api [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416262, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.798058] env[62522]: INFO nova.network.neutron [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Port 9371b30e-3fec-41e5-88af-f58ce423428e from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
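The ReconfigVM_Task, PowerOnVM_Task and PowerOffVM_Task entries above all follow one pattern: the driver submits a vCenter task, logs "Waiting for the task", and oslo.vmware's wait_for_task polls it until a terminal state is reached (the repeated "_poll_task ... progress is N%" lines). A minimal, self-contained sketch of that polling loop follows; get_task_info is a hypothetical stand-in for the property read oslo.vmware performs, not a real oslo.vmware function.

import time

def wait_for_task(task_ref, get_task_info, poll_interval=0.5):
    # Poll until the vCenter task reaches a terminal state, mirroring the
    # "progress is N%" lines emitted while a task is queued or running.
    while True:
        info = get_task_info(task_ref)   # e.g. {'state': 'running', 'progress': 14}
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        print(f"Task {task_ref}: progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)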
[ 1154.798440] env[62522]: DEBUG nova.network.neutron [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Updating instance_info_cache with network_info: [{"id": "55c5c37a-1605-4edb-957e-04160d41ff01", "address": "fa:16:3e:07:85:b9", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c5c37a-16", "ovs_interfaceid": "55c5c37a-1605-4edb-957e-04160d41ff01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.815762] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "interface-93a2505e-814d-4809-90a9-0bc215406efd-9371b30e-3fec-41e5-88af-f58ce423428e" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1154.815979] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "interface-93a2505e-814d-4809-90a9-0bc215406efd-9371b30e-3fec-41e5-88af-f58ce423428e" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1154.816361] env[62522]: DEBUG nova.objects.instance [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lazy-loading 'flavor' on Instance uuid 93a2505e-814d-4809-90a9-0bc215406efd {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1154.829781] env[62522]: DEBUG oslo_vmware.api [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416262, 'name': ReconfigVM_Task, 'duration_secs': 0.151529} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.830567] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489841', 'volume_id': '664f0beb-c41a-4da2-899e-59fba71e5435', 'name': 'volume-664f0beb-c41a-4da2-899e-59fba71e5435', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '80dd48b7-09fb-4127-af11-b2d52a49ca12', 'attached_at': '', 'detached_at': '', 'volume_id': '664f0beb-c41a-4da2-899e-59fba71e5435', 'serial': '664f0beb-c41a-4da2-899e-59fba71e5435'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1154.830912] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1154.831664] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7ad2fde-4610-4b4d-9394-c44bbc0c15fa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.838497] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1154.838704] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ddd5ef91-f9c2-4517-89bd-550cd7950814 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.301176] env[62522]: DEBUG oslo_concurrency.lockutils [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1155.303360] env[62522]: DEBUG oslo_concurrency.lockutils [req-d45e28f7-7b67-4797-8043-2375de7910cc req-e15ee3fc-60dc-4c62-bc10-299d16ceec3e service nova] Acquired lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.304724] env[62522]: DEBUG nova.network.neutron [req-d45e28f7-7b67-4797-8043-2375de7910cc req-e15ee3fc-60dc-4c62-bc10-299d16ceec3e service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Refreshing network info cache for port 55c5c37a-1605-4edb-957e-04160d41ff01 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1155.411865] env[62522]: DEBUG nova.objects.instance [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lazy-loading 'pci_requests' on Instance uuid 
93a2505e-814d-4809-90a9-0bc215406efd {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1155.806489] env[62522]: DEBUG oslo_concurrency.lockutils [None req-187168c9-c183-4402-8786-e65d03fa9c4b tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "interface-892926ef-3044-497c-8fc8-30cd298e4311-9371b30e-3fec-41e5-88af-f58ce423428e" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.584s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.913993] env[62522]: DEBUG nova.objects.base [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Object Instance<93a2505e-814d-4809-90a9-0bc215406efd> lazy-loaded attributes: flavor,pci_requests {{(pid=62522) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1155.914241] env[62522]: DEBUG nova.network.neutron [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1155.983132] env[62522]: DEBUG nova.policy [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab5e5a8e6ee64aad8d52342ee3f5af36', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4bdd1f5caf09454d808bcdc15df2d3a7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1156.016024] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1156.016024] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1156.016024] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Deleting the datastore file [datastore1] 80dd48b7-09fb-4127-af11-b2d52a49ca12 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1156.016024] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bc35af4d-4961-404e-9b35-7ff3d4df4da8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.017537] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None 
req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1156.017876] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1156.018191] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Deleting the datastore file [datastore1] 7f8a8270-5014-446c-aa42-ea0b4079e5a9 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1156.018527] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8b8e6bef-0fe2-4eab-be40-ad47551b3b59 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.026838] env[62522]: DEBUG oslo_vmware.api [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1156.026838] env[62522]: value = "task-2416264" [ 1156.026838] env[62522]: _type = "Task" [ 1156.026838] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.027359] env[62522]: DEBUG oslo_vmware.api [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1156.027359] env[62522]: value = "task-2416265" [ 1156.027359] env[62522]: _type = "Task" [ 1156.027359] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.039022] env[62522]: DEBUG oslo_vmware.api [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416264, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.042106] env[62522]: DEBUG oslo_vmware.api [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416265, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.047628] env[62522]: DEBUG nova.network.neutron [req-d45e28f7-7b67-4797-8043-2375de7910cc req-e15ee3fc-60dc-4c62-bc10-299d16ceec3e service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Updated VIF entry in instance network info cache for port 55c5c37a-1605-4edb-957e-04160d41ff01. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1156.048145] env[62522]: DEBUG nova.network.neutron [req-d45e28f7-7b67-4797-8043-2375de7910cc req-e15ee3fc-60dc-4c62-bc10-299d16ceec3e service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Updating instance_info_cache with network_info: [{"id": "55c5c37a-1605-4edb-957e-04160d41ff01", "address": "fa:16:3e:07:85:b9", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c5c37a-16", "ovs_interfaceid": "55c5c37a-1605-4edb-957e-04160d41ff01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1156.155420] env[62522]: DEBUG nova.compute.manager [req-0c5e6781-bbbb-4a42-87db-89fea51ec24e req-edda003b-c080-44ca-9d26-bc4ac5e624e3 service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Received event network-changed-43d86dfd-5c95-438b-808b-91ab1078323b {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1156.155704] env[62522]: DEBUG nova.compute.manager [req-0c5e6781-bbbb-4a42-87db-89fea51ec24e req-edda003b-c080-44ca-9d26-bc4ac5e624e3 service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Refreshing instance network info cache due to event network-changed-43d86dfd-5c95-438b-808b-91ab1078323b. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1156.155945] env[62522]: DEBUG oslo_concurrency.lockutils [req-0c5e6781-bbbb-4a42-87db-89fea51ec24e req-edda003b-c080-44ca-9d26-bc4ac5e624e3 service nova] Acquiring lock "refresh_cache-93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1156.155981] env[62522]: DEBUG oslo_concurrency.lockutils [req-0c5e6781-bbbb-4a42-87db-89fea51ec24e req-edda003b-c080-44ca-9d26-bc4ac5e624e3 service nova] Acquired lock "refresh_cache-93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.156138] env[62522]: DEBUG nova.network.neutron [req-0c5e6781-bbbb-4a42-87db-89fea51ec24e req-edda003b-c080-44ca-9d26-bc4ac5e624e3 service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Refreshing network info cache for port 43d86dfd-5c95-438b-808b-91ab1078323b {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1156.542712] env[62522]: DEBUG oslo_vmware.api [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416264, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.185087} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.543174] env[62522]: DEBUG oslo_vmware.api [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416265, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.191656} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.543291] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1156.543477] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1156.543661] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1156.543847] env[62522]: INFO nova.compute.manager [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Took 3.36 seconds to destroy the instance on the hypervisor. 
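The entries from 1153.182 onward trace the teardown of instance 80dd48b7-...: power the VM off, detach the Cinder-backed VMDK via a pair of ReconfigVM_Task calls, unregister the VM, delete its datastore directory, and finally (in the entries that follow) deallocate its Neutron ports. A rough sketch of that ordering is below; the five callables are hypothetical stand-ins for the driver and oslo.vmware calls, not real Nova APIs.

def destroy_instance(power_off_vm, detach_volumes, unregister_vm,
                     delete_datastore_dir, deallocate_network):
    # Ordering as observed in the log for instance 80dd48b7-...
    power_off_vm()           # PowerOffVM_Task (task-2416260)
    detach_volumes()         # ReconfigVM_Task pair detaching the VMDK (task-2416261, task-2416262)
    unregister_vm()          # VirtualMachine.UnregisterVM
    delete_datastore_dir()   # FileManager.DeleteDatastoreFile_Task (task-2416264)
    deallocate_network()     # deallocate_for_instance() against Neutron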
[ 1156.544145] env[62522]: DEBUG oslo.service.loopingcall [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1156.544340] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1156.544510] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1156.544679] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1156.544840] env[62522]: INFO nova.compute.manager [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Took 3.90 seconds to destroy the instance on the hypervisor. [ 1156.545062] env[62522]: DEBUG oslo.service.loopingcall [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1156.545245] env[62522]: DEBUG nova.compute.manager [-] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1156.545342] env[62522]: DEBUG nova.network.neutron [-] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1156.546887] env[62522]: DEBUG nova.compute.manager [-] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1156.546994] env[62522]: DEBUG nova.network.neutron [-] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1156.551227] env[62522]: DEBUG oslo_concurrency.lockutils [req-d45e28f7-7b67-4797-8043-2375de7910cc req-e15ee3fc-60dc-4c62-bc10-299d16ceec3e service nova] Releasing lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1157.216535] env[62522]: DEBUG nova.network.neutron [req-0c5e6781-bbbb-4a42-87db-89fea51ec24e req-edda003b-c080-44ca-9d26-bc4ac5e624e3 service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Updated VIF entry in instance network info cache for port 43d86dfd-5c95-438b-808b-91ab1078323b. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1157.216927] env[62522]: DEBUG nova.network.neutron [req-0c5e6781-bbbb-4a42-87db-89fea51ec24e req-edda003b-c080-44ca-9d26-bc4ac5e624e3 service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Updating instance_info_cache with network_info: [{"id": "43d86dfd-5c95-438b-808b-91ab1078323b", "address": "fa:16:3e:ee:37:0f", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43d86dfd-5c", "ovs_interfaceid": "43d86dfd-5c95-438b-808b-91ab1078323b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.381429] env[62522]: DEBUG nova.compute.manager [req-6093bb09-0f5b-4a99-931d-b616fffc49ee req-2be89234-2b1c-4d88-9384-e6a39227677b service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Received event 
network-vif-plugged-9371b30e-3fec-41e5-88af-f58ce423428e {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1157.381678] env[62522]: DEBUG oslo_concurrency.lockutils [req-6093bb09-0f5b-4a99-931d-b616fffc49ee req-2be89234-2b1c-4d88-9384-e6a39227677b service nova] Acquiring lock "93a2505e-814d-4809-90a9-0bc215406efd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1157.385077] env[62522]: DEBUG oslo_concurrency.lockutils [req-6093bb09-0f5b-4a99-931d-b616fffc49ee req-2be89234-2b1c-4d88-9384-e6a39227677b service nova] Lock "93a2505e-814d-4809-90a9-0bc215406efd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1157.386663] env[62522]: DEBUG oslo_concurrency.lockutils [req-6093bb09-0f5b-4a99-931d-b616fffc49ee req-2be89234-2b1c-4d88-9384-e6a39227677b service nova] Lock "93a2505e-814d-4809-90a9-0bc215406efd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.003s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1157.386663] env[62522]: DEBUG nova.compute.manager [req-6093bb09-0f5b-4a99-931d-b616fffc49ee req-2be89234-2b1c-4d88-9384-e6a39227677b service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] No waiting events found dispatching network-vif-plugged-9371b30e-3fec-41e5-88af-f58ce423428e {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1157.386663] env[62522]: WARNING nova.compute.manager [req-6093bb09-0f5b-4a99-931d-b616fffc49ee req-2be89234-2b1c-4d88-9384-e6a39227677b service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Received unexpected event network-vif-plugged-9371b30e-3fec-41e5-88af-f58ce423428e for instance with vm_state active and task_state None. 
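The "-events" lock lines above (clear_events_for_instance, pop_instance_event) use oslo.concurrency's named locks, which is what produces the Acquiring lock / acquired / "released" triplets throughout this log. A minimal sketch of that pattern, using only the documented oslo.concurrency API and one of the instance UUIDs from the log as the lock name, is below.

from oslo_concurrency import lockutils

@lockutils.synchronized('93a2505e-814d-4809-90a9-0bc215406efd-events')
def pop_instance_event():
    # Runs with the per-instance events lock held; concurrent event
    # dispatchers for the same instance serialize here, which is why the
    # log shows "waited 0.000s" / "held 0.003s" around each pop.
    pass

# Equivalent context-manager form for ad-hoc critical sections:
with lockutils.lock('93a2505e-814d-4809-90a9-0bc215406efd'):
    pass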
[ 1157.474693] env[62522]: DEBUG nova.network.neutron [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Successfully updated port: 9371b30e-3fec-41e5-88af-f58ce423428e {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1157.618634] env[62522]: DEBUG nova.network.neutron [-] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.719408] env[62522]: DEBUG oslo_concurrency.lockutils [req-0c5e6781-bbbb-4a42-87db-89fea51ec24e req-edda003b-c080-44ca-9d26-bc4ac5e624e3 service nova] Releasing lock "refresh_cache-93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1157.828687] env[62522]: DEBUG nova.network.neutron [-] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.982632] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "refresh_cache-93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1157.982632] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "refresh_cache-93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.982632] env[62522]: DEBUG nova.network.neutron [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1158.121173] env[62522]: INFO nova.compute.manager [-] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Took 1.58 seconds to deallocate network for instance. [ 1158.181567] env[62522]: DEBUG nova.compute.manager [req-6cdbf38b-99c8-498f-b2bf-1d7ec8138b50 req-c7886ba6-3f9a-4280-a90d-e311f332c3d1 service nova] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Received event network-vif-deleted-7eafd593-e029-4a97-afc2-234f1dd50f20 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1158.181786] env[62522]: DEBUG nova.compute.manager [req-6cdbf38b-99c8-498f-b2bf-1d7ec8138b50 req-c7886ba6-3f9a-4280-a90d-e311f332c3d1 service nova] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Received event network-vif-deleted-661819ce-17f6-47b5-a704-1c8c43e50373 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1158.330823] env[62522]: INFO nova.compute.manager [-] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Took 1.78 seconds to deallocate network for instance. 
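The instance_info_cache entries above and below are lists of VIF dictionaries carrying the MAC address, network, subnets, and fixed/floating IPs for each port. A small self-contained sketch of reading that structure back, using the values logged for port 43d86dfd-... as sample data (trimmed), follows.

# Sample VIF mirroring the cache entry logged for port 43d86dfd-... (trimmed).
sample_vif = {
    "id": "43d86dfd-5c95-438b-808b-91ab1078323b",
    "address": "fa:16:3e:ee:37:0f",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{
                "address": "192.168.128.9",
                "type": "fixed",
                "floating_ips": [{"address": "10.180.180.199", "type": "floating"}],
            }],
        }],
    },
}

def list_addresses(network_info):
    # Yield (mac, fixed_ip, floating_ips) for every fixed IP in the cache.
    for vif in network_info:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                floats = [f["address"] for f in ip.get("floating_ips", [])]
                yield vif["address"], ip["address"], floats

print(list(list_addresses([sample_vif])))
# [('fa:16:3e:ee:37:0f', '192.168.128.9', ['10.180.180.199'])]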
[ 1158.520151] env[62522]: WARNING nova.network.neutron [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] be0fe686-4986-439e-aa82-5cbe54104c8a already exists in list: networks containing: ['be0fe686-4986-439e-aa82-5cbe54104c8a']. ignoring it [ 1158.663832] env[62522]: INFO nova.compute.manager [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Took 0.54 seconds to detach 1 volumes for instance. [ 1158.804282] env[62522]: DEBUG nova.network.neutron [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Updating instance_info_cache with network_info: [{"id": "43d86dfd-5c95-438b-808b-91ab1078323b", "address": "fa:16:3e:ee:37:0f", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43d86dfd-5c", "ovs_interfaceid": "43d86dfd-5c95-438b-808b-91ab1078323b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9371b30e-3fec-41e5-88af-f58ce423428e", "address": "fa:16:3e:15:9a:06", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9371b30e-3f", "ovs_interfaceid": "9371b30e-3fec-41e5-88af-f58ce423428e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.837200] env[62522]: DEBUG 
oslo_concurrency.lockutils [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1158.837461] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1158.837653] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1158.862676] env[62522]: INFO nova.scheduler.client.report [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Deleted allocations for instance 7f8a8270-5014-446c-aa42-ea0b4079e5a9 [ 1159.170649] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1159.170924] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1159.171167] env[62522]: DEBUG nova.objects.instance [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lazy-loading 'resources' on Instance uuid 80dd48b7-09fb-4127-af11-b2d52a49ca12 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1159.306837] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "refresh_cache-93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1159.307508] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1159.307676] env[62522]: DEBUG oslo_concurrency.lockutils [None 
req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.308562] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f46b87-6a45-4f73-af7a-813dc5d74405 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.326021] env[62522]: DEBUG nova.virt.hardware [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1159.326265] env[62522]: DEBUG nova.virt.hardware [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1159.326424] env[62522]: DEBUG nova.virt.hardware [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1159.326603] env[62522]: DEBUG nova.virt.hardware [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1159.326749] env[62522]: DEBUG nova.virt.hardware [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1159.326896] env[62522]: DEBUG nova.virt.hardware [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1159.327117] env[62522]: DEBUG nova.virt.hardware [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1159.327280] env[62522]: DEBUG nova.virt.hardware [None 
req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1159.327446] env[62522]: DEBUG nova.virt.hardware [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1159.327608] env[62522]: DEBUG nova.virt.hardware [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1159.327858] env[62522]: DEBUG nova.virt.hardware [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1159.334105] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Reconfiguring VM to attach interface {{(pid=62522) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1159.334397] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1328fca0-f688-4799-bcb9-97a46cb0b1ad {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.351071] env[62522]: DEBUG oslo_vmware.api [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1159.351071] env[62522]: value = "task-2416266" [ 1159.351071] env[62522]: _type = "Task" [ 1159.351071] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.358420] env[62522]: DEBUG oslo_vmware.api [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416266, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.370258] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9a97cd6e-1b04-42b7-9943-c60c699fbe87 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "7f8a8270-5014-446c-aa42-ea0b4079e5a9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.234s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1159.406766] env[62522]: DEBUG nova.compute.manager [req-1da0bbef-1005-4402-b99d-9a409a61a700 req-92e8903c-7aee-4d3d-b33b-ddcf22b9ab3a service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Received event network-changed-9371b30e-3fec-41e5-88af-f58ce423428e {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1159.407026] env[62522]: DEBUG nova.compute.manager [req-1da0bbef-1005-4402-b99d-9a409a61a700 req-92e8903c-7aee-4d3d-b33b-ddcf22b9ab3a service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Refreshing instance network info cache due to event network-changed-9371b30e-3fec-41e5-88af-f58ce423428e. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1159.407209] env[62522]: DEBUG oslo_concurrency.lockutils [req-1da0bbef-1005-4402-b99d-9a409a61a700 req-92e8903c-7aee-4d3d-b33b-ddcf22b9ab3a service nova] Acquiring lock "refresh_cache-93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1159.407363] env[62522]: DEBUG oslo_concurrency.lockutils [req-1da0bbef-1005-4402-b99d-9a409a61a700 req-92e8903c-7aee-4d3d-b33b-ddcf22b9ab3a service nova] Acquired lock "refresh_cache-93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.407529] env[62522]: DEBUG nova.network.neutron [req-1da0bbef-1005-4402-b99d-9a409a61a700 req-92e8903c-7aee-4d3d-b33b-ddcf22b9ab3a service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Refreshing network info cache for port 9371b30e-3fec-41e5-88af-f58ce423428e {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1159.769039] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4595bd65-fd53-4ba7-bdea-e340050f9674 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.776789] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9001a4-69db-45f6-b439-c2df07e1b7c4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.807763] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808af4b5-38b7-4b58-830c-f1a7579d5cc5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.815575] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45dbab08-7a5d-4965-b848-b2a4f6b7512f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.829953] env[62522]: DEBUG nova.compute.provider_tree [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e 
tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1159.860227] env[62522]: DEBUG oslo_vmware.api [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416266, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.111632] env[62522]: DEBUG nova.network.neutron [req-1da0bbef-1005-4402-b99d-9a409a61a700 req-92e8903c-7aee-4d3d-b33b-ddcf22b9ab3a service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Updated VIF entry in instance network info cache for port 9371b30e-3fec-41e5-88af-f58ce423428e. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1160.112128] env[62522]: DEBUG nova.network.neutron [req-1da0bbef-1005-4402-b99d-9a409a61a700 req-92e8903c-7aee-4d3d-b33b-ddcf22b9ab3a service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Updating instance_info_cache with network_info: [{"id": "43d86dfd-5c95-438b-808b-91ab1078323b", "address": "fa:16:3e:ee:37:0f", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43d86dfd-5c", "ovs_interfaceid": "43d86dfd-5c95-438b-808b-91ab1078323b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9371b30e-3fec-41e5-88af-f58ce423428e", "address": "fa:16:3e:15:9a:06", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9371b30e-3f", "ovs_interfaceid": "9371b30e-3fec-41e5-88af-f58ce423428e", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1160.333593] env[62522]: DEBUG nova.scheduler.client.report [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1160.361200] env[62522]: DEBUG oslo_vmware.api [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416266, 'name': ReconfigVM_Task, 'duration_secs': 0.54385} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.361716] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1160.361961] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Reconfigured VM to attach interface {{(pid=62522) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1160.463503] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "1c6451e0-2fae-4d2b-86d7-86f9537a6259" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1160.463503] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "1c6451e0-2fae-4d2b-86d7-86f9537a6259" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1160.615226] env[62522]: DEBUG oslo_concurrency.lockutils [req-1da0bbef-1005-4402-b99d-9a409a61a700 req-92e8903c-7aee-4d3d-b33b-ddcf22b9ab3a service nova] Releasing lock "refresh_cache-93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1160.838295] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e 
tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.667s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1160.860031] env[62522]: INFO nova.scheduler.client.report [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Deleted allocations for instance 80dd48b7-09fb-4127-af11-b2d52a49ca12 [ 1160.866676] env[62522]: DEBUG oslo_concurrency.lockutils [None req-4c483348-5ffa-4fa2-8ba6-f19d71ba33c1 tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "interface-93a2505e-814d-4809-90a9-0bc215406efd-9371b30e-3fec-41e5-88af-f58ce423428e" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.051s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1160.965556] env[62522]: DEBUG nova.compute.manager [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1161.368561] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9d3257c8-18c4-42d2-8926-389ae32acf1e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "80dd48b7-09fb-4127-af11-b2d52a49ca12" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.693s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1161.491223] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1161.491490] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1161.493142] env[62522]: INFO nova.compute.claims [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1162.598514] env[62522]: DEBUG oslo_concurrency.lockutils [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "interface-93a2505e-814d-4809-90a9-0bc215406efd-9371b30e-3fec-41e5-88af-f58ce423428e" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1162.598850] env[62522]: DEBUG oslo_concurrency.lockutils [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "interface-93a2505e-814d-4809-90a9-0bc215406efd-9371b30e-3fec-41e5-88af-f58ce423428e" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1162.616341] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae1937f1-7e2b-43ad-86ff-94a1401499d9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.625343] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18a10c87-7925-4390-99be-545c56ef31a8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.658796] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041e1667-ff31-46fa-bc1e-6aeb2aa2ad31 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.666764] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f0d5122-84fa-48a7-bed7-e5b9232df772 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.681091] env[62522]: DEBUG nova.compute.provider_tree [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1162.726565] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "24cf2f15-6f6a-4ded-b2fb-85093fddbf2b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1162.726773] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "24cf2f15-6f6a-4ded-b2fb-85093fddbf2b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1163.101446] env[62522]: DEBUG oslo_concurrency.lockutils [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1163.101708] env[62522]: DEBUG oslo_concurrency.lockutils [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 
tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.103067] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e51cdd20-73eb-4a20-b22b-8db9f5d36a76 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.120250] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515fab7f-f499-4d7e-80ea-fcc747fd401d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.145176] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Reconfiguring VM to detach interface {{(pid=62522) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1163.145505] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6527ea99-ba10-45fd-8d88-18418b415300 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.163103] env[62522]: DEBUG oslo_vmware.api [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1163.163103] env[62522]: value = "task-2416267" [ 1163.163103] env[62522]: _type = "Task" [ 1163.163103] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.170671] env[62522]: DEBUG oslo_vmware.api [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416267, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.183738] env[62522]: DEBUG nova.scheduler.client.report [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1163.228843] env[62522]: DEBUG nova.compute.manager [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1163.282089] env[62522]: DEBUG oslo_concurrency.lockutils [None req-78dcd80c-b523-414e-96ad-5215349060e9 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1163.282337] env[62522]: DEBUG oslo_concurrency.lockutils [None req-78dcd80c-b523-414e-96ad-5215349060e9 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1163.672873] env[62522]: DEBUG oslo_vmware.api [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416267, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.689819] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.198s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1163.690353] env[62522]: DEBUG nova.compute.manager [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1163.749965] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1163.750241] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1163.751690] env[62522]: INFO nova.compute.claims [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1163.784752] env[62522]: INFO nova.compute.manager [None req-78dcd80c-b523-414e-96ad-5215349060e9 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Detaching volume 1a6a964c-b9d4-4849-bb10-c20d35c6b3ec [ 1163.813912] env[62522]: INFO nova.virt.block_device [None req-78dcd80c-b523-414e-96ad-5215349060e9 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Attempting to driver detach volume 1a6a964c-b9d4-4849-bb10-c20d35c6b3ec from mountpoint /dev/sdb [ 1163.814204] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-78dcd80c-b523-414e-96ad-5215349060e9 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Volume detach. 
Driver type: vmdk {{(pid=62522) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1163.814406] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-78dcd80c-b523-414e-96ad-5215349060e9 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489819', 'volume_id': '1a6a964c-b9d4-4849-bb10-c20d35c6b3ec', 'name': 'volume-1a6a964c-b9d4-4849-bb10-c20d35c6b3ec', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'c28d2907-5b59-4df8-91a8-4ba0f2047d89', 'attached_at': '', 'detached_at': '', 'volume_id': '1a6a964c-b9d4-4849-bb10-c20d35c6b3ec', 'serial': '1a6a964c-b9d4-4849-bb10-c20d35c6b3ec'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1163.815296] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aadb051-d39d-4570-a81f-1c31fe04d7ca {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.838134] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73ff9b28-fd66-44f7-aea3-7b987d2eb527 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.845354] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-254524a4-1553-44c2-a8f8-392d2ddaca9c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.868704] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a74ef3-0dd6-4ee0-ad50-3f673314f5e4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.886746] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-78dcd80c-b523-414e-96ad-5215349060e9 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] The volume has not been displaced from its original location: [datastore1] volume-1a6a964c-b9d4-4849-bb10-c20d35c6b3ec/volume-1a6a964c-b9d4-4849-bb10-c20d35c6b3ec.vmdk. No consolidation needed. 
{{(pid=62522) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1163.891777] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-78dcd80c-b523-414e-96ad-5215349060e9 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Reconfiguring VM instance instance-0000003c to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1163.892748] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e26acbfb-bd07-4c37-858e-01618e0fd0cd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.911386] env[62522]: DEBUG oslo_vmware.api [None req-78dcd80c-b523-414e-96ad-5215349060e9 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1163.911386] env[62522]: value = "task-2416268" [ 1163.911386] env[62522]: _type = "Task" [ 1163.911386] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.918823] env[62522]: DEBUG oslo_vmware.api [None req-78dcd80c-b523-414e-96ad-5215349060e9 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416268, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.173990] env[62522]: DEBUG oslo_vmware.api [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416267, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.195505] env[62522]: DEBUG nova.compute.utils [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1164.196885] env[62522]: DEBUG nova.compute.manager [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1164.197194] env[62522]: DEBUG nova.network.neutron [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1164.247774] env[62522]: DEBUG nova.policy [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c7a901dd2575462f9369f3d8819fb86d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82346c440c3343a0a5c233a48203a13c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1164.374684] env[62522]: INFO nova.compute.manager [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Rebuilding instance [ 1164.409753] env[62522]: DEBUG nova.compute.manager [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1164.410612] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c87f2eb7-7022-4886-97fb-1ef63e6972dc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.421146] env[62522]: DEBUG oslo_vmware.api [None req-78dcd80c-b523-414e-96ad-5215349060e9 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416268, 'name': ReconfigVM_Task, 'duration_secs': 0.216244} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.422592] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-78dcd80c-b523-414e-96ad-5215349060e9 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Reconfigured VM instance instance-0000003c to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1164.429105] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da27def9-ed7b-431f-8fb2-d3fdee163560 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.444073] env[62522]: DEBUG oslo_vmware.api [None req-78dcd80c-b523-414e-96ad-5215349060e9 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1164.444073] env[62522]: value = "task-2416269" [ 1164.444073] env[62522]: _type = "Task" [ 1164.444073] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.451741] env[62522]: DEBUG oslo_vmware.api [None req-78dcd80c-b523-414e-96ad-5215349060e9 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416269, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.530375] env[62522]: DEBUG nova.network.neutron [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Successfully created port: 66b8c64e-5981-4cc9-b51a-df5bce03233c {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1164.673833] env[62522]: DEBUG oslo_vmware.api [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416267, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.700549] env[62522]: DEBUG nova.compute.manager [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1164.859763] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7318a14f-f012-4605-b49f-907c0e5b7a88 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.867435] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c4d5f90-a5d4-4949-8771-7b737205df46 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.898242] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c704cf3d-b852-4106-9e5a-ad3617f67f09 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.904987] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc083c80-49cd-430f-bc7c-ec867d0fdddf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.917569] env[62522]: DEBUG nova.compute.provider_tree [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1164.953928] env[62522]: DEBUG oslo_vmware.api [None req-78dcd80c-b523-414e-96ad-5215349060e9 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416269, 'name': ReconfigVM_Task, 'duration_secs': 0.138314} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.954263] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-78dcd80c-b523-414e-96ad-5215349060e9 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489819', 'volume_id': '1a6a964c-b9d4-4849-bb10-c20d35c6b3ec', 'name': 'volume-1a6a964c-b9d4-4849-bb10-c20d35c6b3ec', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'c28d2907-5b59-4df8-91a8-4ba0f2047d89', 'attached_at': '', 'detached_at': '', 'volume_id': '1a6a964c-b9d4-4849-bb10-c20d35c6b3ec', 'serial': '1a6a964c-b9d4-4849-bb10-c20d35c6b3ec'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1165.175245] env[62522]: DEBUG oslo_vmware.api [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416267, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.421325] env[62522]: DEBUG nova.scheduler.client.report [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1165.442817] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1165.443207] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f24437a7-c87a-4f48-a0aa-673f5e296bc7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.452939] env[62522]: DEBUG oslo_vmware.api [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Waiting for the task: (returnval){ [ 1165.452939] env[62522]: value = "task-2416270" [ 1165.452939] env[62522]: _type = "Task" [ 1165.452939] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.460978] env[62522]: DEBUG oslo_vmware.api [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416270, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.512133] env[62522]: DEBUG nova.objects.instance [None req-78dcd80c-b523-414e-96ad-5215349060e9 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lazy-loading 'flavor' on Instance uuid c28d2907-5b59-4df8-91a8-4ba0f2047d89 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1165.675285] env[62522]: DEBUG oslo_vmware.api [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416267, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.710736] env[62522]: DEBUG nova.compute.manager [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1165.737061] env[62522]: DEBUG nova.virt.hardware [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1165.737357] env[62522]: DEBUG nova.virt.hardware [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1165.737519] env[62522]: DEBUG nova.virt.hardware [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1165.737702] env[62522]: DEBUG nova.virt.hardware [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1165.737847] env[62522]: DEBUG nova.virt.hardware [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1165.737994] env[62522]: DEBUG 
nova.virt.hardware [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1165.738249] env[62522]: DEBUG nova.virt.hardware [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1165.738412] env[62522]: DEBUG nova.virt.hardware [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1165.738579] env[62522]: DEBUG nova.virt.hardware [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1165.738741] env[62522]: DEBUG nova.virt.hardware [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1165.738912] env[62522]: DEBUG nova.virt.hardware [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1165.740121] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14970901-54ce-4f9e-bada-3051adb6ddae {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.748293] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990fc546-b1e5-4d81-8d1f-31f6769cb4be {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.922906] env[62522]: DEBUG nova.compute.manager [req-c267fbb4-5b43-4ba0-b4a0-1214d9964cc0 req-e2b958a6-eae2-4341-b384-ccba3d636109 service nova] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Received event network-vif-plugged-66b8c64e-5981-4cc9-b51a-df5bce03233c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1165.923190] env[62522]: DEBUG oslo_concurrency.lockutils [req-c267fbb4-5b43-4ba0-b4a0-1214d9964cc0 req-e2b958a6-eae2-4341-b384-ccba3d636109 service nova] Acquiring lock "1c6451e0-2fae-4d2b-86d7-86f9537a6259-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1165.924228] env[62522]: DEBUG oslo_concurrency.lockutils [req-c267fbb4-5b43-4ba0-b4a0-1214d9964cc0 req-e2b958a6-eae2-4341-b384-ccba3d636109 
service nova] Lock "1c6451e0-2fae-4d2b-86d7-86f9537a6259-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1165.924228] env[62522]: DEBUG oslo_concurrency.lockutils [req-c267fbb4-5b43-4ba0-b4a0-1214d9964cc0 req-e2b958a6-eae2-4341-b384-ccba3d636109 service nova] Lock "1c6451e0-2fae-4d2b-86d7-86f9537a6259-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1165.924228] env[62522]: DEBUG nova.compute.manager [req-c267fbb4-5b43-4ba0-b4a0-1214d9964cc0 req-e2b958a6-eae2-4341-b384-ccba3d636109 service nova] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] No waiting events found dispatching network-vif-plugged-66b8c64e-5981-4cc9-b51a-df5bce03233c {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1165.924228] env[62522]: WARNING nova.compute.manager [req-c267fbb4-5b43-4ba0-b4a0-1214d9964cc0 req-e2b958a6-eae2-4341-b384-ccba3d636109 service nova] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Received unexpected event network-vif-plugged-66b8c64e-5981-4cc9-b51a-df5bce03233c for instance with vm_state building and task_state spawning. [ 1165.927010] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.176s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1165.927010] env[62522]: DEBUG nova.compute.manager [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1165.965060] env[62522]: DEBUG oslo_vmware.api [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416270, 'name': PowerOffVM_Task, 'duration_secs': 0.176997} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.965929] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1165.966618] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1165.967096] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a07c69f9-d0e8-4ae7-a971-aaf07aed7689 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.973756] env[62522]: DEBUG oslo_vmware.api [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Waiting for the task: (returnval){ [ 1165.973756] env[62522]: value = "task-2416271" [ 1165.973756] env[62522]: _type = "Task" [ 1165.973756] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.982244] env[62522]: DEBUG oslo_vmware.api [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416271, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.016711] env[62522]: DEBUG nova.network.neutron [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Successfully updated port: 66b8c64e-5981-4cc9-b51a-df5bce03233c {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1166.176238] env[62522]: DEBUG oslo_vmware.api [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416267, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.247074] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.431017] env[62522]: DEBUG nova.compute.utils [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1166.432795] env[62522]: DEBUG nova.compute.manager [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1166.432992] env[62522]: DEBUG nova.network.neutron [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1166.471378] env[62522]: DEBUG nova.policy [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f26eeb125397426baca60d80d635c4b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a06421250694a98b13ff34ad816dc75', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1166.484025] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] VM already powered off {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1166.484361] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Volume detach. 
Driver type: vmdk {{(pid=62522) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1166.484569] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489817', 'volume_id': 'd0f4a6c8-a536-4b93-85d4-2b0510f42669', 'name': 'volume-d0f4a6c8-a536-4b93-85d4-2b0510f42669', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '981a4839-28d0-4d91-88cd-99c1d263ca4d', 'attached_at': '', 'detached_at': '', 'volume_id': 'd0f4a6c8-a536-4b93-85d4-2b0510f42669', 'serial': 'd0f4a6c8-a536-4b93-85d4-2b0510f42669'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1166.485395] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59ac5105-f6ab-43e7-a760-d2e520502a39 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.504275] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8ed607-d0b8-40c0-8a2b-8e520c30bfa0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.511509] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a2cbfb-e448-4096-85ce-3c84d5fb0b22 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.528290] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "refresh_cache-1c6451e0-2fae-4d2b-86d7-86f9537a6259" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1166.528444] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquired lock "refresh_cache-1c6451e0-2fae-4d2b-86d7-86f9537a6259" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1166.528598] env[62522]: DEBUG nova.network.neutron [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1166.531030] env[62522]: DEBUG oslo_concurrency.lockutils [None req-78dcd80c-b523-414e-96ad-5215349060e9 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.248s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.533090] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b4fb480f-f020-453d-a816-40e8a429c3db {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.551160] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] The volume has not been displaced from its original location: [datastore2] volume-d0f4a6c8-a536-4b93-85d4-2b0510f42669/volume-d0f4a6c8-a536-4b93-85d4-2b0510f42669.vmdk. No consolidation needed. {{(pid=62522) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1166.556289] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Reconfiguring VM instance instance-00000061 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1166.556811] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8127730a-d97a-4df0-84ad-7897b667083e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.574732] env[62522]: DEBUG oslo_vmware.api [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Waiting for the task: (returnval){ [ 1166.574732] env[62522]: value = "task-2416272" [ 1166.574732] env[62522]: _type = "Task" [ 1166.574732] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.582626] env[62522]: DEBUG oslo_vmware.api [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416272, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.677325] env[62522]: DEBUG oslo_vmware.api [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416267, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.752157] env[62522]: DEBUG nova.network.neutron [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Successfully created port: d25e13a4-7bac-4701-afa0-5fdd63ad7f3c {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1166.937411] env[62522]: DEBUG nova.compute.manager [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1167.062181] env[62522]: DEBUG nova.network.neutron [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1167.090712] env[62522]: DEBUG oslo_vmware.api [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416272, 'name': ReconfigVM_Task, 'duration_secs': 0.186426} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.091428] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Reconfigured VM instance instance-00000061 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1167.099833] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ebba9be1-5fc1-4f04-96bd-82388dc8b896 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.120867] env[62522]: DEBUG oslo_vmware.api [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Waiting for the task: (returnval){ [ 1167.120867] env[62522]: value = "task-2416273" [ 1167.120867] env[62522]: _type = "Task" [ 1167.120867] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.128453] env[62522]: DEBUG oslo_vmware.api [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416273, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.176609] env[62522]: DEBUG oslo_vmware.api [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416267, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.254348] env[62522]: DEBUG nova.network.neutron [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Updating instance_info_cache with network_info: [{"id": "66b8c64e-5981-4cc9-b51a-df5bce03233c", "address": "fa:16:3e:ab:2f:6d", "network": {"id": "2a4f6f01-ad28-4f8f-b835-ea86e1791f49", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1577320995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82346c440c3343a0a5c233a48203a13c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66b8c64e-59", "ovs_interfaceid": "66b8c64e-5981-4cc9-b51a-df5bce03233c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.558368] env[62522]: DEBUG oslo_concurrency.lockutils [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1167.558659] env[62522]: DEBUG oslo_concurrency.lockutils [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1167.558872] env[62522]: DEBUG oslo_concurrency.lockutils [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1167.559063] env[62522]: DEBUG oslo_concurrency.lockutils [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1167.559241] env[62522]: DEBUG oslo_concurrency.lockutils [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1167.561321] env[62522]: INFO nova.compute.manager [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Terminating instance [ 1167.630664] env[62522]: DEBUG oslo_vmware.api [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416273, 'name': ReconfigVM_Task, 'duration_secs': 0.102566} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.630989] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489817', 'volume_id': 'd0f4a6c8-a536-4b93-85d4-2b0510f42669', 'name': 'volume-d0f4a6c8-a536-4b93-85d4-2b0510f42669', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '981a4839-28d0-4d91-88cd-99c1d263ca4d', 'attached_at': '', 'detached_at': '', 'volume_id': 'd0f4a6c8-a536-4b93-85d4-2b0510f42669', 'serial': 'd0f4a6c8-a536-4b93-85d4-2b0510f42669'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1167.631277] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1167.632085] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccab81cb-b9ac-4404-a58f-aec3f4494684 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.638822] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1167.639055] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-887907b2-5bc7-44ef-9f76-9c9cd068d1f0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.677826] env[62522]: DEBUG oslo_vmware.api [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416267, 'name': ReconfigVM_Task} 
progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.698802] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1167.699056] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1167.699194] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Deleting the datastore file [datastore2] 981a4839-28d0-4d91-88cd-99c1d263ca4d {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1167.699462] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-00d70e85-cf86-4fa0-893b-5a14e66e9c4f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.706058] env[62522]: DEBUG oslo_vmware.api [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Waiting for the task: (returnval){ [ 1167.706058] env[62522]: value = "task-2416275" [ 1167.706058] env[62522]: _type = "Task" [ 1167.706058] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.715598] env[62522]: DEBUG oslo_vmware.api [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416275, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.748272] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1167.756969] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Releasing lock "refresh_cache-1c6451e0-2fae-4d2b-86d7-86f9537a6259" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1167.757315] env[62522]: DEBUG nova.compute.manager [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Instance network_info: |[{"id": "66b8c64e-5981-4cc9-b51a-df5bce03233c", "address": "fa:16:3e:ab:2f:6d", "network": {"id": "2a4f6f01-ad28-4f8f-b835-ea86e1791f49", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1577320995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82346c440c3343a0a5c233a48203a13c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66b8c64e-59", "ovs_interfaceid": "66b8c64e-5981-4cc9-b51a-df5bce03233c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1167.757768] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:2f:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '193994c7-8e1b-4f25-a4a4-d0563845eb28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '66b8c64e-5981-4cc9-b51a-df5bce03233c', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1167.765415] env[62522]: DEBUG oslo.service.loopingcall [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1167.765609] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1167.765815] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8da9a489-00a5-4d86-998a-b20c9a86954d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.785074] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1167.785074] env[62522]: value = "task-2416276" [ 1167.785074] env[62522]: _type = "Task" [ 1167.785074] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.792114] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416276, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.947016] env[62522]: DEBUG nova.compute.manager [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1167.954591] env[62522]: DEBUG nova.compute.manager [req-3909427d-4621-4523-8464-dea869e278fd req-50aad12d-625c-4049-be10-0c42fe6db596 service nova] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Received event network-changed-66b8c64e-5981-4cc9-b51a-df5bce03233c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1167.954799] env[62522]: DEBUG nova.compute.manager [req-3909427d-4621-4523-8464-dea869e278fd req-50aad12d-625c-4049-be10-0c42fe6db596 service nova] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Refreshing instance network info cache due to event network-changed-66b8c64e-5981-4cc9-b51a-df5bce03233c. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1167.955394] env[62522]: DEBUG oslo_concurrency.lockutils [req-3909427d-4621-4523-8464-dea869e278fd req-50aad12d-625c-4049-be10-0c42fe6db596 service nova] Acquiring lock "refresh_cache-1c6451e0-2fae-4d2b-86d7-86f9537a6259" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1167.955564] env[62522]: DEBUG oslo_concurrency.lockutils [req-3909427d-4621-4523-8464-dea869e278fd req-50aad12d-625c-4049-be10-0c42fe6db596 service nova] Acquired lock "refresh_cache-1c6451e0-2fae-4d2b-86d7-86f9537a6259" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1167.955734] env[62522]: DEBUG nova.network.neutron [req-3909427d-4621-4523-8464-dea869e278fd req-50aad12d-625c-4049-be10-0c42fe6db596 service nova] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Refreshing network info cache for port 66b8c64e-5981-4cc9-b51a-df5bce03233c {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1167.973389] env[62522]: DEBUG nova.virt.hardware [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1167.973634] env[62522]: DEBUG nova.virt.hardware [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1167.973798] env[62522]: DEBUG nova.virt.hardware [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1167.973988] env[62522]: DEBUG nova.virt.hardware [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1167.974162] env[62522]: DEBUG nova.virt.hardware [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1167.974313] env[62522]: DEBUG nova.virt.hardware [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 
tempest-DeleteServersTestJSON-552527927-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1167.974523] env[62522]: DEBUG nova.virt.hardware [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1167.974691] env[62522]: DEBUG nova.virt.hardware [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1167.974852] env[62522]: DEBUG nova.virt.hardware [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1167.975029] env[62522]: DEBUG nova.virt.hardware [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1167.975210] env[62522]: DEBUG nova.virt.hardware [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1167.976491] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df01f6ed-f863-4801-b4e4-076a84a599f2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.984644] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c5666a-93de-4e02-8f70-8e5bdc703120 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.065275] env[62522]: DEBUG nova.compute.manager [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1168.065503] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1168.066386] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-873e6cd8-dafb-41bd-940e-1e6ad50546db {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.073805] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1168.074067] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-770a53c5-9e68-4d45-b479-199d09b71c0b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.079375] env[62522]: DEBUG oslo_vmware.api [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1168.079375] env[62522]: value = "task-2416277" [ 1168.079375] env[62522]: _type = "Task" [ 1168.079375] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.088667] env[62522]: DEBUG oslo_vmware.api [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416277, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.178590] env[62522]: DEBUG oslo_vmware.api [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416267, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.215440] env[62522]: DEBUG oslo_vmware.api [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Task: {'id': task-2416275, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096048} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.215697] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1168.215885] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1168.216119] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1168.221207] env[62522]: DEBUG nova.network.neutron [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Successfully updated port: d25e13a4-7bac-4701-afa0-5fdd63ad7f3c {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1168.242619] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1168.273070] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Volume detach. Driver type: vmdk {{(pid=62522) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1168.273406] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-430828aa-69fe-4c97-99ae-06ce49414b05 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.282061] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf97436b-2d04-42b8-bf5e-4b27bc616903 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.302477] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416276, 'name': CreateVM_Task, 'duration_secs': 0.286875} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.302644] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1168.303303] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1168.303470] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.303791] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1168.304053] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c796ac03-551e-4c3f-972c-3e892e1cceb4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.314944] env[62522]: ERROR nova.compute.manager [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Failed to detach volume d0f4a6c8-a536-4b93-85d4-2b0510f42669 from /dev/sda: nova.exception.InstanceNotFound: Instance 981a4839-28d0-4d91-88cd-99c1d263ca4d could not be found. 
[ 1168.314944] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Traceback (most recent call last): [ 1168.314944] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1168.314944] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] self.driver.rebuild(**kwargs) [ 1168.314944] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1168.314944] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] raise NotImplementedError() [ 1168.314944] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] NotImplementedError [ 1168.314944] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] [ 1168.314944] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] During handling of the above exception, another exception occurred: [ 1168.314944] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] [ 1168.314944] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Traceback (most recent call last): [ 1168.314944] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1168.314944] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] self.driver.detach_volume(context, old_connection_info, [ 1168.314944] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1168.314944] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] return self._volumeops.detach_volume(connection_info, instance) [ 1168.314944] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1168.314944] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] self._detach_volume_vmdk(connection_info, instance) [ 1168.314944] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1168.314944] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1168.314944] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1168.314944] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] stable_ref.fetch_moref(session) [ 1168.314944] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1168.314944] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1168.314944] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] 
nova.exception.InstanceNotFound: Instance 981a4839-28d0-4d91-88cd-99c1d263ca4d could not be found. [ 1168.314944] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] [ 1168.318087] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1168.318087] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52167269-d9a5-370e-935a-ff2ec2816b79" [ 1168.318087] env[62522]: _type = "Task" [ 1168.318087] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.325591] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52167269-d9a5-370e-935a-ff2ec2816b79, 'name': SearchDatastore_Task, 'duration_secs': 0.013156} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.325858] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1168.326102] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1168.326335] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1168.326482] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.326659] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1168.326893] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-032d72fa-4905-4a3f-bda4-618364a0a922 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.334072] env[62522]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1168.334313] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1168.334943] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9746e128-7541-4006-9f14-d5946fe706c9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.340124] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1168.340124] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f7721d-f998-0e40-eb02-882928143b20" [ 1168.340124] env[62522]: _type = "Task" [ 1168.340124] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.350224] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f7721d-f998-0e40-eb02-882928143b20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.448876] env[62522]: DEBUG nova.compute.utils [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Build of instance 981a4839-28d0-4d91-88cd-99c1d263ca4d aborted: Failed to rebuild volume backed instance. {{(pid=62522) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1168.451134] env[62522]: ERROR nova.compute.manager [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 981a4839-28d0-4d91-88cd-99c1d263ca4d aborted: Failed to rebuild volume backed instance. 
[ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Traceback (most recent call last): [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] self.driver.rebuild(**kwargs) [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] raise NotImplementedError() [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] NotImplementedError [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] During handling of the above exception, another exception occurred: [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Traceback (most recent call last): [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] self._detach_root_volume(context, instance, root_bdm) [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] with excutils.save_and_reraise_exception(): [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] self.force_reraise() [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] raise self.value [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] self.driver.detach_volume(context, old_connection_info, [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] return self._volumeops.detach_volume(connection_info, instance) [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] self._detach_volume_vmdk(connection_info, instance) [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] stable_ref.fetch_moref(session) [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] nova.exception.InstanceNotFound: Instance 981a4839-28d0-4d91-88cd-99c1d263ca4d could not be found. [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] During handling of the above exception, another exception occurred: [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Traceback (most recent call last): [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File "/opt/stack/nova/nova/compute/manager.py", line 11382, in _error_out_instance_on_exception [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] yield [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 1168.451134] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] self._do_rebuild_instance_with_claim( [ 1168.452219] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 1168.452219] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] self._do_rebuild_instance( [ 1168.452219] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 1168.452219] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] self._rebuild_default_impl(**kwargs) [ 1168.452219] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 1168.452219] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] 
self._rebuild_volume_backed_instance( [ 1168.452219] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 1168.452219] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] raise exception.BuildAbortException( [ 1168.452219] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] nova.exception.BuildAbortException: Build of instance 981a4839-28d0-4d91-88cd-99c1d263ca4d aborted: Failed to rebuild volume backed instance. [ 1168.452219] env[62522]: ERROR nova.compute.manager [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] [ 1168.592950] env[62522]: DEBUG oslo_vmware.api [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416277, 'name': PowerOffVM_Task, 'duration_secs': 0.192665} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.592950] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1168.593145] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1168.593701] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c9d9cd12-3d63-4c8f-a4a7-fdb688f9edc9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.660181] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1168.660369] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1168.660554] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Deleting the datastore file [datastore2] c28d2907-5b59-4df8-91a8-4ba0f2047d89 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1168.660825] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9b85e940-b73a-4e75-9465-1a6029b4c759 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.668198] 
env[62522]: DEBUG oslo_vmware.api [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1168.668198] env[62522]: value = "task-2416279" [ 1168.668198] env[62522]: _type = "Task" [ 1168.668198] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.676852] env[62522]: DEBUG nova.network.neutron [req-3909427d-4621-4523-8464-dea869e278fd req-50aad12d-625c-4049-be10-0c42fe6db596 service nova] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Updated VIF entry in instance network info cache for port 66b8c64e-5981-4cc9-b51a-df5bce03233c. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1168.677278] env[62522]: DEBUG nova.network.neutron [req-3909427d-4621-4523-8464-dea869e278fd req-50aad12d-625c-4049-be10-0c42fe6db596 service nova] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Updating instance_info_cache with network_info: [{"id": "66b8c64e-5981-4cc9-b51a-df5bce03233c", "address": "fa:16:3e:ab:2f:6d", "network": {"id": "2a4f6f01-ad28-4f8f-b835-ea86e1791f49", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1577320995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82346c440c3343a0a5c233a48203a13c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66b8c64e-59", "ovs_interfaceid": "66b8c64e-5981-4cc9-b51a-df5bce03233c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1168.683855] env[62522]: DEBUG oslo_vmware.api [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416279, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.684147] env[62522]: DEBUG oslo_vmware.api [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416267, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.727420] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "refresh_cache-24cf2f15-6f6a-4ded-b2fb-85093fddbf2b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1168.727624] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired lock "refresh_cache-24cf2f15-6f6a-4ded-b2fb-85093fddbf2b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.727768] env[62522]: DEBUG nova.network.neutron [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1168.850292] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f7721d-f998-0e40-eb02-882928143b20, 'name': SearchDatastore_Task, 'duration_secs': 0.008764} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.851095] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aceefc01-9c2e-4a9a-a118-d1ee501efa13 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.856226] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1168.856226] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527f8399-35d8-c377-7e19-8b13a799c2d9" [ 1168.856226] env[62522]: _type = "Task" [ 1168.856226] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.863622] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527f8399-35d8-c377-7e19-8b13a799c2d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.182126] env[62522]: DEBUG oslo_vmware.api [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416267, 'name': ReconfigVM_Task, 'duration_secs': 5.756405} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.185047] env[62522]: DEBUG oslo_concurrency.lockutils [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1169.185275] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Reconfigured VM to detach interface {{(pid=62522) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1169.187428] env[62522]: DEBUG oslo_concurrency.lockutils [req-3909427d-4621-4523-8464-dea869e278fd req-50aad12d-625c-4049-be10-0c42fe6db596 service nova] Releasing lock "refresh_cache-1c6451e0-2fae-4d2b-86d7-86f9537a6259" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1169.187776] env[62522]: DEBUG oslo_vmware.api [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416279, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154875} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.188008] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1169.188197] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1169.188367] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1169.188551] env[62522]: INFO nova.compute.manager [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1169.188799] env[62522]: DEBUG oslo.service.loopingcall [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1169.189276] env[62522]: DEBUG nova.compute.manager [-] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1169.189375] env[62522]: DEBUG nova.network.neutron [-] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1169.246871] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1169.247063] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Starting heal instance info cache {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1169.247156] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Rebuilding the list of instances to heal {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 1169.278436] env[62522]: DEBUG nova.network.neutron [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1169.367760] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527f8399-35d8-c377-7e19-8b13a799c2d9, 'name': SearchDatastore_Task, 'duration_secs': 0.009623} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.368756] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1169.368756] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 1c6451e0-2fae-4d2b-86d7-86f9537a6259/1c6451e0-2fae-4d2b-86d7-86f9537a6259.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1169.368756] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d713408-3af5-4657-abb9-e4133458d46d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.378600] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1169.378600] env[62522]: value = "task-2416280" [ 1169.378600] env[62522]: _type = "Task" [ 1169.378600] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.387514] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416280, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.551345] env[62522]: DEBUG nova.network.neutron [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Updating instance_info_cache with network_info: [{"id": "d25e13a4-7bac-4701-afa0-5fdd63ad7f3c", "address": "fa:16:3e:02:12:f6", "network": {"id": "21d0ca22-5509-4f9b-b167-f9fde2dac808", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-547933771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a06421250694a98b13ff34ad816dc75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd25e13a4-7b", "ovs_interfaceid": "d25e13a4-7bac-4701-afa0-5fdd63ad7f3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1169.751610] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Skipping network cache update for instance because it is being deleted. {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10462}} [ 1169.752079] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Skipping network cache update for instance because it is Building. {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 1169.752079] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Skipping network cache update for instance because it is Building. 
{{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 1169.812451] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1169.812636] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquired lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1169.812787] env[62522]: DEBUG nova.network.neutron [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Forcefully refreshing network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1169.813008] env[62522]: DEBUG nova.objects.instance [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lazy-loading 'info_cache' on Instance uuid 892926ef-3044-497c-8fc8-30cd298e4311 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1169.889757] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416280, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.028414] env[62522]: DEBUG nova.compute.manager [req-d0e0ee3a-52a2-4d32-831e-799e3f11dc27 req-91b21dc7-06a8-41dd-b139-e95307b11d29 service nova] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Received event network-vif-plugged-d25e13a4-7bac-4701-afa0-5fdd63ad7f3c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1170.028414] env[62522]: DEBUG oslo_concurrency.lockutils [req-d0e0ee3a-52a2-4d32-831e-799e3f11dc27 req-91b21dc7-06a8-41dd-b139-e95307b11d29 service nova] Acquiring lock "24cf2f15-6f6a-4ded-b2fb-85093fddbf2b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.028592] env[62522]: DEBUG oslo_concurrency.lockutils [req-d0e0ee3a-52a2-4d32-831e-799e3f11dc27 req-91b21dc7-06a8-41dd-b139-e95307b11d29 service nova] Lock "24cf2f15-6f6a-4ded-b2fb-85093fddbf2b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1170.028788] env[62522]: DEBUG oslo_concurrency.lockutils [req-d0e0ee3a-52a2-4d32-831e-799e3f11dc27 req-91b21dc7-06a8-41dd-b139-e95307b11d29 service nova] Lock "24cf2f15-6f6a-4ded-b2fb-85093fddbf2b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1170.028964] env[62522]: DEBUG nova.compute.manager [req-d0e0ee3a-52a2-4d32-831e-799e3f11dc27 req-91b21dc7-06a8-41dd-b139-e95307b11d29 service nova] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] No waiting events found dispatching network-vif-plugged-d25e13a4-7bac-4701-afa0-5fdd63ad7f3c {{(pid=62522) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1170.031514] env[62522]: WARNING nova.compute.manager [req-d0e0ee3a-52a2-4d32-831e-799e3f11dc27 req-91b21dc7-06a8-41dd-b139-e95307b11d29 service nova] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Received unexpected event network-vif-plugged-d25e13a4-7bac-4701-afa0-5fdd63ad7f3c for instance with vm_state building and task_state spawning. [ 1170.031514] env[62522]: DEBUG nova.compute.manager [req-d0e0ee3a-52a2-4d32-831e-799e3f11dc27 req-91b21dc7-06a8-41dd-b139-e95307b11d29 service nova] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Received event network-changed-d25e13a4-7bac-4701-afa0-5fdd63ad7f3c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1170.031514] env[62522]: DEBUG nova.compute.manager [req-d0e0ee3a-52a2-4d32-831e-799e3f11dc27 req-91b21dc7-06a8-41dd-b139-e95307b11d29 service nova] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Refreshing instance network info cache due to event network-changed-d25e13a4-7bac-4701-afa0-5fdd63ad7f3c. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1170.031514] env[62522]: DEBUG oslo_concurrency.lockutils [req-d0e0ee3a-52a2-4d32-831e-799e3f11dc27 req-91b21dc7-06a8-41dd-b139-e95307b11d29 service nova] Acquiring lock "refresh_cache-24cf2f15-6f6a-4ded-b2fb-85093fddbf2b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1170.053911] env[62522]: DEBUG nova.compute.manager [req-661f5b11-cd84-4242-b37f-8a35cb681a64 req-ddc0b3ba-7ce7-437f-ba1c-c71fff475f46 service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Received event network-vif-deleted-931dfe44-9ac3-4df4-a4ea-6c8612389451 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1170.054157] env[62522]: INFO nova.compute.manager [req-661f5b11-cd84-4242-b37f-8a35cb681a64 req-ddc0b3ba-7ce7-437f-ba1c-c71fff475f46 service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Neutron deleted interface 931dfe44-9ac3-4df4-a4ea-6c8612389451; detaching it from the instance and deleting it from the info cache [ 1170.054341] env[62522]: DEBUG nova.network.neutron [req-661f5b11-cd84-4242-b37f-8a35cb681a64 req-ddc0b3ba-7ce7-437f-ba1c-c71fff475f46 service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.055485] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Releasing lock "refresh_cache-24cf2f15-6f6a-4ded-b2fb-85093fddbf2b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1170.055825] env[62522]: DEBUG nova.compute.manager [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Instance network_info: |[{"id": "d25e13a4-7bac-4701-afa0-5fdd63ad7f3c", "address": "fa:16:3e:02:12:f6", "network": {"id": "21d0ca22-5509-4f9b-b167-f9fde2dac808", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-547933771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": 
"fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a06421250694a98b13ff34ad816dc75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd25e13a4-7b", "ovs_interfaceid": "d25e13a4-7bac-4701-afa0-5fdd63ad7f3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1170.056736] env[62522]: DEBUG oslo_concurrency.lockutils [req-d0e0ee3a-52a2-4d32-831e-799e3f11dc27 req-91b21dc7-06a8-41dd-b139-e95307b11d29 service nova] Acquired lock "refresh_cache-24cf2f15-6f6a-4ded-b2fb-85093fddbf2b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.056819] env[62522]: DEBUG nova.network.neutron [req-d0e0ee3a-52a2-4d32-831e-799e3f11dc27 req-91b21dc7-06a8-41dd-b139-e95307b11d29 service nova] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Refreshing network info cache for port d25e13a4-7bac-4701-afa0-5fdd63ad7f3c {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1170.057975] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:12:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24144f5a-050a-4f1e-8d8c-774dc16dc791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd25e13a4-7bac-4701-afa0-5fdd63ad7f3c', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1170.066082] env[62522]: DEBUG oslo.service.loopingcall [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1170.068143] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1170.068689] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-88ceb9b8-6468-4eb4-9a52-0b92f7594e4d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.089359] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1170.089359] env[62522]: value = "task-2416281" [ 1170.089359] env[62522]: _type = "Task" [ 1170.089359] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.097386] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416281, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.389568] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416280, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.627833} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.389854] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 1c6451e0-2fae-4d2b-86d7-86f9537a6259/1c6451e0-2fae-4d2b-86d7-86f9537a6259.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1170.390090] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1170.391167] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5559bb47-edcc-4f7b-bb6b-5698e30b2f49 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.398967] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1170.398967] env[62522]: value = "task-2416282" [ 1170.398967] env[62522]: _type = "Task" [ 1170.398967] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.406146] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416282, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.467522] env[62522]: DEBUG oslo_concurrency.lockutils [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.467647] env[62522]: DEBUG oslo_concurrency.lockutils [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1170.514079] env[62522]: DEBUG oslo_concurrency.lockutils [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "refresh_cache-93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1170.514247] env[62522]: DEBUG oslo_concurrency.lockutils [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquired lock "refresh_cache-93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.514420] env[62522]: DEBUG nova.network.neutron [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1170.537608] env[62522]: DEBUG nova.network.neutron [-] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.557467] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a53578e2-329e-49c5-94c8-49e52badcf12 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.574341] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c50dc4fd-e4d7-4025-887e-ad8744f1317c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.588704] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a888f7-9eb0-4e6d-90ce-ea24d7cedb27 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.603136] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e425e2d9-a351-488a-9576-71b5a40aeb45 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.606370] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416281, 'name': CreateVM_Task, 'duration_secs': 
0.384119} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.614847] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1170.614847] env[62522]: DEBUG nova.compute.manager [req-661f5b11-cd84-4242-b37f-8a35cb681a64 req-ddc0b3ba-7ce7-437f-ba1c-c71fff475f46 service nova] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Detach interface failed, port_id=931dfe44-9ac3-4df4-a4ea-6c8612389451, reason: Instance c28d2907-5b59-4df8-91a8-4ba0f2047d89 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1170.615821] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1170.615986] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.616306] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1170.616616] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17f4e5ad-993d-4067-a38a-87a35a3c2749 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.646775] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97bf7e88-36fb-43cc-9663-094235b9780d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.653228] env[62522]: DEBUG oslo_vmware.api [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1170.653228] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f0da8b-2a8f-5b3c-36d2-5661bce64168" [ 1170.653228] env[62522]: _type = "Task" [ 1170.653228] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.659075] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a4e347-2acb-4e45-912f-8db561427b21 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.666439] env[62522]: DEBUG oslo_vmware.api [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f0da8b-2a8f-5b3c-36d2-5661bce64168, 'name': SearchDatastore_Task, 'duration_secs': 0.009791} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.667053] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1170.667348] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1170.667538] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1170.667682] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.667937] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1170.668204] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-912ebf33-3bcf-48e0-80bd-8aa7f8ab4028 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.679144] env[62522]: DEBUG nova.compute.provider_tree [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1170.691023] env[62522]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1170.691023] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1170.692411] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a2fdc45-36d3-4b00-beec-670531e78915 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.700347] env[62522]: DEBUG oslo_vmware.api [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1170.700347] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5286e8e3-6406-9558-ddd6-3352a17de68f" [ 1170.700347] env[62522]: _type = "Task" [ 1170.700347] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.718225] env[62522]: DEBUG oslo_vmware.api [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5286e8e3-6406-9558-ddd6-3352a17de68f, 'name': SearchDatastore_Task} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.719012] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7a5aac4-508d-4158-a8b7-a60e94f8170b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.725189] env[62522]: DEBUG oslo_vmware.api [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1170.725189] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52edebec-ec21-96d2-ee0d-973ba9614cc1" [ 1170.725189] env[62522]: _type = "Task" [ 1170.725189] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.734240] env[62522]: DEBUG oslo_vmware.api [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52edebec-ec21-96d2-ee0d-973ba9614cc1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.908295] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416282, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080083} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.908576] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1170.909329] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9499e6aa-490b-41b9-85c3-c81102784c36 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.931540] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 1c6451e0-2fae-4d2b-86d7-86f9537a6259/1c6451e0-2fae-4d2b-86d7-86f9537a6259.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1170.931842] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e59ec01-ca74-4da6-bacf-bc9ca432de46 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.954272] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1170.954272] env[62522]: value = "task-2416283" [ 1170.954272] env[62522]: _type = "Task" [ 1170.954272] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.960350] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416283, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.965046] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "08d7e318-ea68-4807-a300-ee4a7993647d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.965046] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "08d7e318-ea68-4807-a300-ee4a7993647d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.014767] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "93a2505e-814d-4809-90a9-0bc215406efd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1171.015337] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "93a2505e-814d-4809-90a9-0bc215406efd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.015337] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "93a2505e-814d-4809-90a9-0bc215406efd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1171.015498] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "93a2505e-814d-4809-90a9-0bc215406efd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.015612] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "93a2505e-814d-4809-90a9-0bc215406efd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1171.017750] env[62522]: INFO nova.compute.manager [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 
tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Terminating instance [ 1171.040219] env[62522]: INFO nova.compute.manager [-] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Took 1.85 seconds to deallocate network for instance. [ 1171.183401] env[62522]: DEBUG nova.scheduler.client.report [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1171.238408] env[62522]: DEBUG oslo_vmware.api [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52edebec-ec21-96d2-ee0d-973ba9614cc1, 'name': SearchDatastore_Task, 'duration_secs': 0.009634} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.238408] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1171.238408] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b/24cf2f15-6f6a-4ded-b2fb-85093fddbf2b.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1171.238408] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-994b0b30-aff3-47c1-b952-c91e1644df50 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.243146] env[62522]: DEBUG oslo_vmware.api [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1171.243146] env[62522]: value = "task-2416284" [ 1171.243146] env[62522]: _type = "Task" [ 1171.243146] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.250778] env[62522]: DEBUG oslo_vmware.api [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416284, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.359314] env[62522]: DEBUG nova.network.neutron [req-d0e0ee3a-52a2-4d32-831e-799e3f11dc27 req-91b21dc7-06a8-41dd-b139-e95307b11d29 service nova] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Updated VIF entry in instance network info cache for port d25e13a4-7bac-4701-afa0-5fdd63ad7f3c. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1171.359617] env[62522]: DEBUG nova.network.neutron [req-d0e0ee3a-52a2-4d32-831e-799e3f11dc27 req-91b21dc7-06a8-41dd-b139-e95307b11d29 service nova] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Updating instance_info_cache with network_info: [{"id": "d25e13a4-7bac-4701-afa0-5fdd63ad7f3c", "address": "fa:16:3e:02:12:f6", "network": {"id": "21d0ca22-5509-4f9b-b167-f9fde2dac808", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-547933771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a06421250694a98b13ff34ad816dc75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd25e13a4-7b", "ovs_interfaceid": "d25e13a4-7bac-4701-afa0-5fdd63ad7f3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.465240] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416283, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.468351] env[62522]: DEBUG nova.compute.manager [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1171.525872] env[62522]: DEBUG nova.compute.manager [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1171.526157] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1171.527346] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6a5cb81-18fc-46d0-9bcb-36e0baee723b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.539733] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1171.540209] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0ca2fb05-dfff-48e5-9ac7-83581cd7195f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.546672] env[62522]: DEBUG oslo_concurrency.lockutils [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1171.549828] env[62522]: DEBUG oslo_vmware.api [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1171.549828] env[62522]: value = "task-2416285" [ 1171.549828] env[62522]: _type = "Task" [ 1171.549828] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.555959] env[62522]: INFO nova.network.neutron [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Port 9371b30e-3fec-41e5-88af-f58ce423428e from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1171.556601] env[62522]: DEBUG nova.network.neutron [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Updating instance_info_cache with network_info: [{"id": "43d86dfd-5c95-438b-808b-91ab1078323b", "address": "fa:16:3e:ee:37:0f", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43d86dfd-5c", "ovs_interfaceid": "43d86dfd-5c95-438b-808b-91ab1078323b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.562772] env[62522]: DEBUG oslo_vmware.api [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416285, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.688385] env[62522]: DEBUG oslo_concurrency.lockutils [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.220s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1171.688706] env[62522]: INFO nova.compute.manager [None req-786b7b72-e834-49bc-9ddf-db712f33592e tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Successfully reverted task state from rebuilding on failure for instance. 
[ 1171.695220] env[62522]: DEBUG oslo_concurrency.lockutils [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.149s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.695474] env[62522]: DEBUG nova.objects.instance [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lazy-loading 'resources' on Instance uuid c28d2907-5b59-4df8-91a8-4ba0f2047d89 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1171.755274] env[62522]: DEBUG oslo_vmware.api [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416284, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501289} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.755549] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b/24cf2f15-6f6a-4ded-b2fb-85093fddbf2b.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1171.755760] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1171.756017] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3cb5fb48-1d8f-4ee6-aa33-044acb9bcdfd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.762995] env[62522]: DEBUG oslo_vmware.api [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1171.762995] env[62522]: value = "task-2416286" [ 1171.762995] env[62522]: _type = "Task" [ 1171.762995] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.772934] env[62522]: DEBUG oslo_vmware.api [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416286, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.798493] env[62522]: DEBUG nova.network.neutron [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Updating instance_info_cache with network_info: [{"id": "55c5c37a-1605-4edb-957e-04160d41ff01", "address": "fa:16:3e:07:85:b9", "network": {"id": "be0fe686-4986-439e-aa82-5cbe54104c8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-558443835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bdd1f5caf09454d808bcdc15df2d3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c5c37a-16", "ovs_interfaceid": "55c5c37a-1605-4edb-957e-04160d41ff01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.865840] env[62522]: DEBUG oslo_concurrency.lockutils [req-d0e0ee3a-52a2-4d32-831e-799e3f11dc27 req-91b21dc7-06a8-41dd-b139-e95307b11d29 service nova] Releasing lock "refresh_cache-24cf2f15-6f6a-4ded-b2fb-85093fddbf2b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1171.866288] env[62522]: DEBUG oslo_concurrency.lockutils [None req-977da8e6-4950-4488-9382-f2eb8dae85af tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Acquiring lock "981a4839-28d0-4d91-88cd-99c1d263ca4d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1171.866523] env[62522]: DEBUG oslo_concurrency.lockutils [None req-977da8e6-4950-4488-9382-f2eb8dae85af tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Lock "981a4839-28d0-4d91-88cd-99c1d263ca4d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.866739] env[62522]: DEBUG oslo_concurrency.lockutils [None req-977da8e6-4950-4488-9382-f2eb8dae85af tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Acquiring lock "981a4839-28d0-4d91-88cd-99c1d263ca4d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1171.866925] env[62522]: DEBUG oslo_concurrency.lockutils [None req-977da8e6-4950-4488-9382-f2eb8dae85af tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Lock 
"981a4839-28d0-4d91-88cd-99c1d263ca4d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.867111] env[62522]: DEBUG oslo_concurrency.lockutils [None req-977da8e6-4950-4488-9382-f2eb8dae85af tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Lock "981a4839-28d0-4d91-88cd-99c1d263ca4d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1171.869472] env[62522]: INFO nova.compute.manager [None req-977da8e6-4950-4488-9382-f2eb8dae85af tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Terminating instance [ 1171.962369] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416283, 'name': ReconfigVM_Task, 'duration_secs': 0.570339} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.962694] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 1c6451e0-2fae-4d2b-86d7-86f9537a6259/1c6451e0-2fae-4d2b-86d7-86f9537a6259.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1171.963889] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3e30b8b1-5d2e-403e-8b29-e6947bef6e11 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.969685] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1171.969685] env[62522]: value = "task-2416287" [ 1171.969685] env[62522]: _type = "Task" [ 1171.969685] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.979348] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416287, 'name': Rename_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.997548] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1172.059144] env[62522]: DEBUG oslo_vmware.api [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416285, 'name': PowerOffVM_Task, 'duration_secs': 0.28832} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.059375] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1172.059560] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1172.059814] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-38f93d71-e609-47cb-b587-6eadb0b812ed {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.064032] env[62522]: DEBUG oslo_concurrency.lockutils [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Releasing lock "refresh_cache-93a2505e-814d-4809-90a9-0bc215406efd" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1172.122269] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1172.122546] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1172.122669] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Deleting the datastore file [datastore2] 93a2505e-814d-4809-90a9-0bc215406efd {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1172.122937] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-a4117b06-811f-41ed-a1d0-d40c950beb7e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.129974] env[62522]: DEBUG oslo_vmware.api [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1172.129974] env[62522]: value = "task-2416289" [ 1172.129974] env[62522]: _type = "Task" [ 1172.129974] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.137558] env[62522]: DEBUG oslo_vmware.api [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416289, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.275027] env[62522]: DEBUG oslo_vmware.api [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416286, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075826} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.275336] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1172.276144] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-426e308d-02fa-4dcf-8856-7bf22663b7f2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.299094] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b/24cf2f15-6f6a-4ded-b2fb-85093fddbf2b.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1172.301275] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11c64f12-2287-4b4e-b5af-3985572b193b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.314864] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Releasing lock "refresh_cache-892926ef-3044-497c-8fc8-30cd298e4311" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1172.315070] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Updated the network info_cache for instance {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 1172.315490] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running 
periodic task ComputeManager._poll_rebooting_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.316018] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.316178] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.316352] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.316507] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.316640] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62522) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1172.316784] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.320912] env[62522]: DEBUG oslo_vmware.api [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1172.320912] env[62522]: value = "task-2416290" [ 1172.320912] env[62522]: _type = "Task" [ 1172.320912] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.326095] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6785442-63d1-48ca-b44e-1a796c9fcb80 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.332304] env[62522]: DEBUG oslo_vmware.api [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416290, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.336942] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fff1b77-1837-426d-afeb-b825dd2c6981 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.371754] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3541bd48-6ad7-42d9-8022-0a5cb660cf25 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.375352] env[62522]: DEBUG nova.compute.manager [None req-977da8e6-4950-4488-9382-f2eb8dae85af tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1172.375674] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-87698ab9-ec54-46a5-bc17-a67cd6f1985d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.383354] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8002fe9-463a-4837-a1a3-097abedd5d58 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.389923] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ee6f2d-5973-4e4f-aa65-db77b889000c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.409663] env[62522]: DEBUG nova.compute.provider_tree [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1172.417896] env[62522]: WARNING nova.virt.vmwareapi.driver [None req-977da8e6-4950-4488-9382-f2eb8dae85af tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 981a4839-28d0-4d91-88cd-99c1d263ca4d could not be found. 
[ 1172.418123] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-977da8e6-4950-4488-9382-f2eb8dae85af tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1172.418648] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0ab8a724-79a2-445a-9ca0-98aa67958aa4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.426907] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad2b659-ba20-4c16-8a4c-a19716c50c8b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.453023] env[62522]: WARNING nova.virt.vmwareapi.vmops [None req-977da8e6-4950-4488-9382-f2eb8dae85af tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 981a4839-28d0-4d91-88cd-99c1d263ca4d could not be found. [ 1172.453265] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-977da8e6-4950-4488-9382-f2eb8dae85af tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1172.453474] env[62522]: INFO nova.compute.manager [None req-977da8e6-4950-4488-9382-f2eb8dae85af tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Took 0.08 seconds to destroy the instance on the hypervisor. [ 1172.453722] env[62522]: DEBUG oslo.service.loopingcall [None req-977da8e6-4950-4488-9382-f2eb8dae85af tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1172.453960] env[62522]: DEBUG nova.compute.manager [-] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1172.454100] env[62522]: DEBUG nova.network.neutron [-] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1172.478494] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416287, 'name': Rename_Task, 'duration_secs': 0.147494} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.478762] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1172.479029] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-66ee32fb-457e-4535-ae04-c1b1da39b5c2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.486207] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1172.486207] env[62522]: value = "task-2416291" [ 1172.486207] env[62522]: _type = "Task" [ 1172.486207] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.495623] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416291, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.567900] env[62522]: DEBUG oslo_concurrency.lockutils [None req-68435f65-1b62-4018-9b35-7040f1a81cbe tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "interface-93a2505e-814d-4809-90a9-0bc215406efd-9371b30e-3fec-41e5-88af-f58ce423428e" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.969s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1172.641441] env[62522]: DEBUG oslo_vmware.api [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416289, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181096} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.641732] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1172.641919] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1172.642138] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1172.642339] env[62522]: INFO nova.compute.manager [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1172.642628] env[62522]: DEBUG oslo.service.loopingcall [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1172.642835] env[62522]: DEBUG nova.compute.manager [-] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1172.642938] env[62522]: DEBUG nova.network.neutron [-] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1172.820718] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1172.832021] env[62522]: DEBUG oslo_vmware.api [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416290, 'name': ReconfigVM_Task, 'duration_secs': 0.362574} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.832479] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b/24cf2f15-6f6a-4ded-b2fb-85093fddbf2b.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1172.833244] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c95a3744-40a6-4814-85a5-f123ff251ab8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.840591] env[62522]: DEBUG oslo_vmware.api [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1172.840591] env[62522]: value = "task-2416292" [ 1172.840591] env[62522]: _type = "Task" [ 1172.840591] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.851254] env[62522]: DEBUG oslo_vmware.api [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416292, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.920671] env[62522]: DEBUG nova.scheduler.client.report [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1172.996958] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416291, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.184729] env[62522]: DEBUG nova.compute.manager [req-409aba7f-e808-4621-8c7d-bb728b7cf717 req-d6969cd9-2786-45c0-ac7a-3dc50e7c31a8 service nova] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Received event network-vif-deleted-608eb061-5051-4459-a45a-6359abaf3221 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1173.184932] env[62522]: INFO nova.compute.manager [req-409aba7f-e808-4621-8c7d-bb728b7cf717 req-d6969cd9-2786-45c0-ac7a-3dc50e7c31a8 service nova] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Neutron deleted interface 608eb061-5051-4459-a45a-6359abaf3221; detaching it from the instance and deleting it from the info cache [ 1173.185123] env[62522]: DEBUG nova.network.neutron [req-409aba7f-e808-4621-8c7d-bb728b7cf717 req-d6969cd9-2786-45c0-ac7a-3dc50e7c31a8 service nova] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1173.351147] env[62522]: DEBUG oslo_vmware.api [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416292, 'name': Rename_Task, 'duration_secs': 0.419828} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.351510] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1173.351852] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-17895931-a1d3-42a2-b878-bfa993854557 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.358729] env[62522]: DEBUG oslo_vmware.api [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1173.358729] env[62522]: value = "task-2416293" [ 1173.358729] env[62522]: _type = "Task" [ 1173.358729] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.366524] env[62522]: DEBUG oslo_vmware.api [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416293, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.425682] env[62522]: DEBUG oslo_concurrency.lockutils [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.730s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1173.428119] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.431s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1173.430056] env[62522]: INFO nova.compute.claims [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1173.448222] env[62522]: INFO nova.scheduler.client.report [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Deleted allocations for instance c28d2907-5b59-4df8-91a8-4ba0f2047d89 [ 1173.498936] env[62522]: DEBUG oslo_vmware.api [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416291, 'name': PowerOnVM_Task, 'duration_secs': 0.562787} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.499162] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1173.499260] env[62522]: INFO nova.compute.manager [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Took 7.79 seconds to spawn the instance on the hypervisor. 
[ 1173.499640] env[62522]: DEBUG nova.compute.manager [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1173.500982] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cdbbf91-2405-418c-9af2-cec1b640bde3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.667095] env[62522]: DEBUG nova.network.neutron [-] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1173.690212] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-66552f32-bee2-47f6-bb30-233ce5207090 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.702905] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67627a64-232b-4665-b107-c56b7f36345f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.730580] env[62522]: DEBUG nova.compute.manager [req-409aba7f-e808-4621-8c7d-bb728b7cf717 req-d6969cd9-2786-45c0-ac7a-3dc50e7c31a8 service nova] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Detach interface failed, port_id=608eb061-5051-4459-a45a-6359abaf3221, reason: Instance 981a4839-28d0-4d91-88cd-99c1d263ca4d could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1173.869347] env[62522]: DEBUG oslo_vmware.api [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416293, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.960117] env[62522]: DEBUG oslo_concurrency.lockutils [None req-db440b63-715c-4e5e-b43f-331b1e134343 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "c28d2907-5b59-4df8-91a8-4ba0f2047d89" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.401s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1174.022224] env[62522]: INFO nova.compute.manager [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Took 12.55 seconds to build instance. [ 1174.170893] env[62522]: INFO nova.compute.manager [-] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Took 1.72 seconds to deallocate network for instance. 
[ 1174.196191] env[62522]: DEBUG nova.network.neutron [-] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1174.373393] env[62522]: DEBUG oslo_vmware.api [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416293, 'name': PowerOnVM_Task, 'duration_secs': 0.530863} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.373722] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1174.377108] env[62522]: INFO nova.compute.manager [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Took 6.43 seconds to spawn the instance on the hypervisor. [ 1174.377108] env[62522]: DEBUG nova.compute.manager [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1174.377108] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-320c178d-3121-404e-8dd2-09119a7ca90f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.524812] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8586640-9e74-41a7-82d9-c9b9528d9c6f tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "1c6451e0-2fae-4d2b-86d7-86f9537a6259" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.061s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1174.660190] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504867c0-b045-45c1-876c-610d711ee4ea {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.667864] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b80119-c48e-49cd-965a-7fa7ae087c21 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.699989] env[62522]: INFO nova.compute.manager [-] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Took 2.06 seconds to deallocate network for instance. 
[ 1174.702560] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8470290-b471-4168-a3fa-2aa6d309c450 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.716695] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3438b47-c1dd-4609-bd1c-5e5e0ebd4cb5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.731075] env[62522]: DEBUG nova.compute.provider_tree [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1174.733285] env[62522]: INFO nova.compute.manager [None req-977da8e6-4950-4488-9382-f2eb8dae85af tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Took 0.56 seconds to detach 1 volumes for instance. [ 1174.735092] env[62522]: DEBUG nova.compute.manager [None req-977da8e6-4950-4488-9382-f2eb8dae85af tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Deleting volume: d0f4a6c8-a536-4b93-85d4-2b0510f42669 {{(pid=62522) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1174.891475] env[62522]: INFO nova.compute.manager [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Took 11.16 seconds to build instance. 
[ 1175.213116] env[62522]: DEBUG nova.compute.manager [req-aec8a1fa-3764-424c-8e8f-093a5fd20cdc req-bb2c0a4d-d0ff-422a-830a-db586e76ac47 service nova] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Received event network-vif-deleted-43d86dfd-5c95-438b-808b-91ab1078323b {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1175.217666] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.237780] env[62522]: DEBUG nova.scheduler.client.report [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1175.290468] env[62522]: DEBUG oslo_concurrency.lockutils [None req-977da8e6-4950-4488-9382-f2eb8dae85af tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.393745] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ea6da3ef-c158-424a-9d6f-dd71f0559a7a tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "24cf2f15-6f6a-4ded-b2fb-85093fddbf2b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.667s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1175.741278] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.313s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1175.741854] env[62522]: DEBUG nova.compute.manager [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1175.744996] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.925s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1175.745222] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1175.745397] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62522) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1175.745743] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.529s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1175.746416] env[62522]: DEBUG nova.objects.instance [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lazy-loading 'resources' on Instance uuid 93a2505e-814d-4809-90a9-0bc215406efd {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1175.752562] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22eb6cab-f7a9-41b5-bf52-3ee255437bf5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.761523] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea462545-be06-4656-8346-fadf3451a115 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.779171] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b457b49-2f49-46d5-a9b6-24b5196ab67b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.785950] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61bd1ba3-4e3b-4be1-8664-272e22548c62 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.818908] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180544MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=62522) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1175.819156] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.841353] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "24cf2f15-6f6a-4ded-b2fb-85093fddbf2b" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.841927] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "24cf2f15-6f6a-4ded-b2fb-85093fddbf2b" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1175.842151] env[62522]: INFO nova.compute.manager [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Shelving [ 1176.256028] env[62522]: DEBUG nova.compute.utils [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1176.256028] env[62522]: DEBUG nova.compute.manager [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1176.256028] env[62522]: DEBUG nova.network.neutron [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1176.305185] env[62522]: DEBUG nova.policy [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3a4ba3a3d3a34495b7a7e0618577d60f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '071dd4c295a54e388099d5bf0f4e300b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1176.516412] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082447b4-1f40-40d0-9059-a14ce2f2a5ba {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.528348] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65375e62-78b2-439e-91ff-91b009510006 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.564534] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65711be5-aff7-4d65-8251-c6d9cfadbcb6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.574950] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6edebf32-98e9-4355-bbe9-2d8413143a52 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.591391] env[62522]: DEBUG nova.compute.provider_tree [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1176.669911] env[62522]: DEBUG nova.network.neutron [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Successfully created port: 2eb2d7ef-0c29-4fda-947c-b02485470817 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1176.691246] env[62522]: DEBUG oslo_concurrency.lockutils [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1176.691599] env[62522]: 
DEBUG oslo_concurrency.lockutils [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1176.759787] env[62522]: DEBUG nova.compute.manager [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1176.853071] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1176.854172] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0eff4e0d-ebc3-45ce-b39b-d371c6467a5b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.862069] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1176.862069] env[62522]: value = "task-2416295" [ 1176.862069] env[62522]: _type = "Task" [ 1176.862069] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.875439] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416295, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.098154] env[62522]: DEBUG nova.scheduler.client.report [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1177.196792] env[62522]: DEBUG nova.compute.manager [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1177.250635] env[62522]: DEBUG nova.compute.manager [req-2cb66985-cd25-4b30-82a2-35b621613034 req-5843cec0-2ba2-4e9f-bdcc-8462559ade49 service nova] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Received event network-changed-66b8c64e-5981-4cc9-b51a-df5bce03233c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1177.250635] env[62522]: DEBUG nova.compute.manager [req-2cb66985-cd25-4b30-82a2-35b621613034 req-5843cec0-2ba2-4e9f-bdcc-8462559ade49 service nova] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Refreshing instance network info cache due to event network-changed-66b8c64e-5981-4cc9-b51a-df5bce03233c. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1177.250635] env[62522]: DEBUG oslo_concurrency.lockutils [req-2cb66985-cd25-4b30-82a2-35b621613034 req-5843cec0-2ba2-4e9f-bdcc-8462559ade49 service nova] Acquiring lock "refresh_cache-1c6451e0-2fae-4d2b-86d7-86f9537a6259" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1177.250635] env[62522]: DEBUG oslo_concurrency.lockutils [req-2cb66985-cd25-4b30-82a2-35b621613034 req-5843cec0-2ba2-4e9f-bdcc-8462559ade49 service nova] Acquired lock "refresh_cache-1c6451e0-2fae-4d2b-86d7-86f9537a6259" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1177.250635] env[62522]: DEBUG nova.network.neutron [req-2cb66985-cd25-4b30-82a2-35b621613034 req-5843cec0-2ba2-4e9f-bdcc-8462559ade49 service nova] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Refreshing network info cache for port 66b8c64e-5981-4cc9-b51a-df5bce03233c {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1177.372734] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416295, 'name': PowerOffVM_Task, 'duration_secs': 0.241767} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.373453] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1177.374652] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b392488-6cee-466c-a4e2-102db70f3a9b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.396782] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dbae149-4c86-4da4-b457-ef2d8ab1a32e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.603366] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.857s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1177.605687] env[62522]: DEBUG oslo_concurrency.lockutils [None req-977da8e6-4950-4488-9382-f2eb8dae85af tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.315s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1177.605909] env[62522]: DEBUG nova.objects.instance [None req-977da8e6-4950-4488-9382-f2eb8dae85af tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Lazy-loading 'resources' on Instance uuid 981a4839-28d0-4d91-88cd-99c1d263ca4d {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1177.628239] env[62522]: INFO nova.scheduler.client.report [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Deleted allocations for instance 93a2505e-814d-4809-90a9-0bc215406efd [ 1177.720649] env[62522]: DEBUG oslo_concurrency.lockutils [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.770266] env[62522]: DEBUG nova.compute.manager [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1177.812365] env[62522]: DEBUG nova.virt.hardware [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1177.812613] env[62522]: DEBUG nova.virt.hardware [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1177.812770] env[62522]: DEBUG nova.virt.hardware [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1177.812952] env[62522]: DEBUG nova.virt.hardware [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1177.813154] env[62522]: DEBUG nova.virt.hardware [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1177.813422] env[62522]: DEBUG nova.virt.hardware [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1177.813659] env[62522]: DEBUG nova.virt.hardware [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1177.813821] env[62522]: DEBUG nova.virt.hardware [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1177.813992] env[62522]: DEBUG nova.virt.hardware [None 
req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1177.814173] env[62522]: DEBUG nova.virt.hardware [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1177.814346] env[62522]: DEBUG nova.virt.hardware [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1177.815576] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d223400a-6f60-406d-a89f-0cbd7b105a11 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.827392] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62de858e-5e9c-44ee-b983-11eb34acd8d9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.910172] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Creating Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1177.910172] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-796b3c02-077c-4416-922c-bb9a0d3c6bcc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.917866] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1177.917866] env[62522]: value = "task-2416296" [ 1177.917866] env[62522]: _type = "Task" [ 1177.917866] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.926609] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416296, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.148952] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9aed9564-7db5-409b-9325-4957590e074c tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "93a2505e-814d-4809-90a9-0bc215406efd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.134s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1178.211790] env[62522]: DEBUG nova.network.neutron [req-2cb66985-cd25-4b30-82a2-35b621613034 req-5843cec0-2ba2-4e9f-bdcc-8462559ade49 service nova] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Updated VIF entry in instance network info cache for port 66b8c64e-5981-4cc9-b51a-df5bce03233c. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1178.213102] env[62522]: DEBUG nova.network.neutron [req-2cb66985-cd25-4b30-82a2-35b621613034 req-5843cec0-2ba2-4e9f-bdcc-8462559ade49 service nova] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Updating instance_info_cache with network_info: [{"id": "66b8c64e-5981-4cc9-b51a-df5bce03233c", "address": "fa:16:3e:ab:2f:6d", "network": {"id": "2a4f6f01-ad28-4f8f-b835-ea86e1791f49", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1577320995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82346c440c3343a0a5c233a48203a13c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66b8c64e-59", "ovs_interfaceid": "66b8c64e-5981-4cc9-b51a-df5bce03233c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.262789] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3388ff20-58bb-406a-8363-cde6c2cb0bdd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.274359] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe68f8b-1e2a-4934-ae19-4d93f06558d7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.311224] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-916f998f-d19f-4091-9dd4-616aa0a524d6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.314699] env[62522]: DEBUG oslo_concurrency.lockutils [None req-80500ddb-0bba-4a92-a3fe-67677dce01ad tempest-AttachVolumeTestJSON-1462924921 
tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1178.314934] env[62522]: DEBUG oslo_concurrency.lockutils [None req-80500ddb-0bba-4a92-a3fe-67677dce01ad tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1178.320028] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1619cf20-754d-4a76-8f2e-d1a8881c321c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.339169] env[62522]: DEBUG nova.compute.provider_tree [None req-977da8e6-4950-4488-9382-f2eb8dae85af tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1178.427495] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416296, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.484840] env[62522]: DEBUG nova.network.neutron [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Successfully updated port: 2eb2d7ef-0c29-4fda-947c-b02485470817 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1178.717962] env[62522]: DEBUG oslo_concurrency.lockutils [req-2cb66985-cd25-4b30-82a2-35b621613034 req-5843cec0-2ba2-4e9f-bdcc-8462559ade49 service nova] Releasing lock "refresh_cache-1c6451e0-2fae-4d2b-86d7-86f9537a6259" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1178.820718] env[62522]: DEBUG nova.compute.utils [None req-80500ddb-0bba-4a92-a3fe-67677dce01ad tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1178.842816] env[62522]: DEBUG nova.scheduler.client.report [None req-977da8e6-4950-4488-9382-f2eb8dae85af tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1178.887132] env[62522]: DEBUG oslo_concurrency.lockutils [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "892926ef-3044-497c-8fc8-30cd298e4311" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1178.887417] env[62522]: DEBUG oslo_concurrency.lockutils [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "892926ef-3044-497c-8fc8-30cd298e4311" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1178.887840] env[62522]: DEBUG oslo_concurrency.lockutils [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "892926ef-3044-497c-8fc8-30cd298e4311-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1178.888051] env[62522]: DEBUG oslo_concurrency.lockutils [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "892926ef-3044-497c-8fc8-30cd298e4311-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1178.888230] env[62522]: DEBUG oslo_concurrency.lockutils [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "892926ef-3044-497c-8fc8-30cd298e4311-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1178.890250] env[62522]: INFO nova.compute.manager [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Terminating instance [ 1178.933072] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416296, 'name': CreateSnapshot_Task, 'duration_secs': 0.589552} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.933347] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Created Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1178.934118] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-063d6a80-5ee4-4ba6-9979-57f9f51f6adc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.988042] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "refresh_cache-08d7e318-ea68-4807-a300-ee4a7993647d" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1178.988042] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired lock "refresh_cache-08d7e318-ea68-4807-a300-ee4a7993647d" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.988383] env[62522]: DEBUG nova.network.neutron [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1179.307990] env[62522]: DEBUG nova.compute.manager [req-2e213c09-774f-465c-ad89-b45641636550 req-0eca0031-5486-455c-a3ef-3ff0e2b439ad service nova] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Received event network-vif-plugged-2eb2d7ef-0c29-4fda-947c-b02485470817 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1179.308247] env[62522]: DEBUG oslo_concurrency.lockutils [req-2e213c09-774f-465c-ad89-b45641636550 req-0eca0031-5486-455c-a3ef-3ff0e2b439ad service nova] Acquiring lock "08d7e318-ea68-4807-a300-ee4a7993647d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1179.308456] env[62522]: DEBUG oslo_concurrency.lockutils [req-2e213c09-774f-465c-ad89-b45641636550 req-0eca0031-5486-455c-a3ef-3ff0e2b439ad service nova] Lock "08d7e318-ea68-4807-a300-ee4a7993647d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1179.308620] env[62522]: DEBUG oslo_concurrency.lockutils [req-2e213c09-774f-465c-ad89-b45641636550 req-0eca0031-5486-455c-a3ef-3ff0e2b439ad service nova] Lock "08d7e318-ea68-4807-a300-ee4a7993647d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1179.308794] env[62522]: DEBUG nova.compute.manager [req-2e213c09-774f-465c-ad89-b45641636550 
req-0eca0031-5486-455c-a3ef-3ff0e2b439ad service nova] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] No waiting events found dispatching network-vif-plugged-2eb2d7ef-0c29-4fda-947c-b02485470817 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1179.308975] env[62522]: WARNING nova.compute.manager [req-2e213c09-774f-465c-ad89-b45641636550 req-0eca0031-5486-455c-a3ef-3ff0e2b439ad service nova] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Received unexpected event network-vif-plugged-2eb2d7ef-0c29-4fda-947c-b02485470817 for instance with vm_state building and task_state spawning. [ 1179.309155] env[62522]: DEBUG nova.compute.manager [req-2e213c09-774f-465c-ad89-b45641636550 req-0eca0031-5486-455c-a3ef-3ff0e2b439ad service nova] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Received event network-changed-2eb2d7ef-0c29-4fda-947c-b02485470817 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1179.309311] env[62522]: DEBUG nova.compute.manager [req-2e213c09-774f-465c-ad89-b45641636550 req-0eca0031-5486-455c-a3ef-3ff0e2b439ad service nova] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Refreshing instance network info cache due to event network-changed-2eb2d7ef-0c29-4fda-947c-b02485470817. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1179.309473] env[62522]: DEBUG oslo_concurrency.lockutils [req-2e213c09-774f-465c-ad89-b45641636550 req-0eca0031-5486-455c-a3ef-3ff0e2b439ad service nova] Acquiring lock "refresh_cache-08d7e318-ea68-4807-a300-ee4a7993647d" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1179.323400] env[62522]: DEBUG oslo_concurrency.lockutils [None req-80500ddb-0bba-4a92-a3fe-67677dce01ad tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1179.347461] env[62522]: DEBUG oslo_concurrency.lockutils [None req-977da8e6-4950-4488-9382-f2eb8dae85af tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.742s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1179.349684] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 3.531s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1179.396871] env[62522]: DEBUG nova.compute.manager [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1179.397105] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1179.399661] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06bf5815-a3f0-45ff-acbe-7a70ff67457e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.407828] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1179.408101] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-662a3b10-3d73-474d-be41-5531cfe0ebd1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.415322] env[62522]: DEBUG oslo_vmware.api [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1179.415322] env[62522]: value = "task-2416297" [ 1179.415322] env[62522]: _type = "Task" [ 1179.415322] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.424359] env[62522]: DEBUG oslo_vmware.api [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416297, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.458317] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Creating linked-clone VM from snapshot {{(pid=62522) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1179.459019] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b255183d-b9ae-4c38-bcbe-00b440ae9133 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.467748] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1179.467748] env[62522]: value = "task-2416298" [ 1179.467748] env[62522]: _type = "Task" [ 1179.467748] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.476536] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416298, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.492981] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "a4cb5c19-9087-4354-9689-a99ae8924dc1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1179.493268] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "a4cb5c19-9087-4354-9689-a99ae8924dc1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1179.530599] env[62522]: DEBUG nova.network.neutron [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1179.735879] env[62522]: DEBUG nova.network.neutron [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Updating instance_info_cache with network_info: [{"id": "2eb2d7ef-0c29-4fda-947c-b02485470817", "address": "fa:16:3e:70:9f:11", "network": {"id": "fd993a8b-571c-4873-a5d6-4d8c60e23d38", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2084093882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071dd4c295a54e388099d5bf0f4e300b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2eb2d7ef-0c", "ovs_interfaceid": "2eb2d7ef-0c29-4fda-947c-b02485470817", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1179.869253] env[62522]: DEBUG oslo_concurrency.lockutils [None req-977da8e6-4950-4488-9382-f2eb8dae85af tempest-ServerActionsV293TestJSON-1506818009 tempest-ServerActionsV293TestJSON-1506818009-project-member] Lock "981a4839-28d0-4d91-88cd-99c1d263ca4d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.003s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1179.925918] env[62522]: DEBUG oslo_vmware.api [None 
req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416297, 'name': PowerOffVM_Task, 'duration_secs': 0.258865} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.926205] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1179.926384] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1179.926625] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd85b9ab-9194-44ce-bd43-9a9e577dca36 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.976964] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416298, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.989934] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1179.990170] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1179.990355] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Deleting the datastore file [datastore1] 892926ef-3044-497c-8fc8-30cd298e4311 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1179.990607] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd306c66-cdb3-41b7-b6fe-883ab9cadffb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.997219] env[62522]: DEBUG oslo_vmware.api [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for the task: (returnval){ [ 1179.997219] env[62522]: value = "task-2416300" [ 1179.997219] env[62522]: _type = "Task" [ 1179.997219] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.001616] env[62522]: DEBUG nova.compute.manager [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1180.008672] env[62522]: DEBUG oslo_vmware.api [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416300, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.238329] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Releasing lock "refresh_cache-08d7e318-ea68-4807-a300-ee4a7993647d" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1180.238690] env[62522]: DEBUG nova.compute.manager [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Instance network_info: |[{"id": "2eb2d7ef-0c29-4fda-947c-b02485470817", "address": "fa:16:3e:70:9f:11", "network": {"id": "fd993a8b-571c-4873-a5d6-4d8c60e23d38", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2084093882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071dd4c295a54e388099d5bf0f4e300b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2eb2d7ef-0c", "ovs_interfaceid": "2eb2d7ef-0c29-4fda-947c-b02485470817", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1180.239017] env[62522]: DEBUG oslo_concurrency.lockutils [req-2e213c09-774f-465c-ad89-b45641636550 req-0eca0031-5486-455c-a3ef-3ff0e2b439ad service nova] Acquired lock "refresh_cache-08d7e318-ea68-4807-a300-ee4a7993647d" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.239206] env[62522]: DEBUG nova.network.neutron [req-2e213c09-774f-465c-ad89-b45641636550 req-0eca0031-5486-455c-a3ef-3ff0e2b439ad service nova] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Refreshing network info cache for port 2eb2d7ef-0c29-4fda-947c-b02485470817 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1180.240755] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None 
req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:9f:11', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd7b5f1ef-d4b9-4ec3-b047-17e4cb349d25', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2eb2d7ef-0c29-4fda-947c-b02485470817', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1180.255574] env[62522]: DEBUG oslo.service.loopingcall [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1180.259093] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1180.259536] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-798ba37f-5566-4793-a810-78390347ab47 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.280908] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1180.280908] env[62522]: value = "task-2416301" [ 1180.280908] env[62522]: _type = "Task" [ 1180.280908] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.290591] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416301, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.380222] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 892926ef-3044-497c-8fc8-30cd298e4311 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1180.380452] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance b31195c2-29f4-475c-baa7-fcb4791b7278 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1180.380614] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance f3894644-eb7e-4a6d-9029-4cd30466d6f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1180.380782] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 1c6451e0-2fae-4d2b-86d7-86f9537a6259 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1180.380948] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1180.381128] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 08d7e318-ea68-4807-a300-ee4a7993647d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1180.383311] env[62522]: DEBUG oslo_concurrency.lockutils [None req-80500ddb-0bba-4a92-a3fe-67677dce01ad tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1180.383629] env[62522]: DEBUG oslo_concurrency.lockutils [None req-80500ddb-0bba-4a92-a3fe-67677dce01ad tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" acquired by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1180.383929] env[62522]: INFO nova.compute.manager [None req-80500ddb-0bba-4a92-a3fe-67677dce01ad tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Attaching volume a0aa294f-c381-417e-981c-8709a38bb633 to /dev/sdb [ 1180.419732] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b66f9f8b-0edc-467e-be8e-e10fa399de05 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.426792] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e1d828-14f4-4cfa-9556-113c239926bc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.440181] env[62522]: DEBUG nova.virt.block_device [None req-80500ddb-0bba-4a92-a3fe-67677dce01ad tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Updating existing volume attachment record: 90e99013-fda4-4d3d-b044-aa917fe7268f {{(pid=62522) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1180.477645] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416298, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.506798] env[62522]: DEBUG oslo_vmware.api [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Task: {'id': task-2416300, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.239589} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.507074] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1180.507265] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1180.507442] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1180.507615] env[62522]: INFO nova.compute.manager [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1180.507862] env[62522]: DEBUG oslo.service.loopingcall [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1180.508072] env[62522]: DEBUG nova.compute.manager [-] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1180.508171] env[62522]: DEBUG nova.network.neutron [-] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1180.527430] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1180.603225] env[62522]: DEBUG nova.network.neutron [req-2e213c09-774f-465c-ad89-b45641636550 req-0eca0031-5486-455c-a3ef-3ff0e2b439ad service nova] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Updated VIF entry in instance network info cache for port 2eb2d7ef-0c29-4fda-947c-b02485470817. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1180.603697] env[62522]: DEBUG nova.network.neutron [req-2e213c09-774f-465c-ad89-b45641636550 req-0eca0031-5486-455c-a3ef-3ff0e2b439ad service nova] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Updating instance_info_cache with network_info: [{"id": "2eb2d7ef-0c29-4fda-947c-b02485470817", "address": "fa:16:3e:70:9f:11", "network": {"id": "fd993a8b-571c-4873-a5d6-4d8c60e23d38", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2084093882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071dd4c295a54e388099d5bf0f4e300b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2eb2d7ef-0c", "ovs_interfaceid": "2eb2d7ef-0c29-4fda-947c-b02485470817", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.790469] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416301, 'name': CreateVM_Task, 'duration_secs': 0.329834} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.790630] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1180.791381] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1180.791570] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.791871] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1180.792135] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5e169a3-5410-462e-9185-0e7c59a16ce5 {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.796806] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1180.796806] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e8a315-bb1d-fe97-a027-e503ee6e41f4" [ 1180.796806] env[62522]: _type = "Task" [ 1180.796806] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.804273] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e8a315-bb1d-fe97-a027-e503ee6e41f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.889611] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance cb7a19f1-6093-47ee-bbbc-a75dd5423f32 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1180.977954] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416298, 'name': CloneVM_Task, 'duration_secs': 1.216358} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.978323] env[62522]: INFO nova.virt.vmwareapi.vmops [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Created linked-clone VM from snapshot [ 1180.979012] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a29a4a8-b350-4903-91bd-72a57bd3d10a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.987872] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Uploading image d64c1d73-9bd0-47b2-8b2e-f26e09a3b34f {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1181.016052] env[62522]: DEBUG oslo_vmware.rw_handles [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1181.016052] env[62522]: value = "vm-489845" [ 1181.016052] env[62522]: _type = "VirtualMachine" [ 1181.016052] env[62522]: }. 
{{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1181.016454] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-677b8461-bbd7-4b90-8118-357ed2ed35b5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.023565] env[62522]: DEBUG oslo_vmware.rw_handles [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lease: (returnval){ [ 1181.023565] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529db591-22df-f2d9-94d6-dc6d2aaa459d" [ 1181.023565] env[62522]: _type = "HttpNfcLease" [ 1181.023565] env[62522]: } obtained for exporting VM: (result){ [ 1181.023565] env[62522]: value = "vm-489845" [ 1181.023565] env[62522]: _type = "VirtualMachine" [ 1181.023565] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1181.023817] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the lease: (returnval){ [ 1181.023817] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529db591-22df-f2d9-94d6-dc6d2aaa459d" [ 1181.023817] env[62522]: _type = "HttpNfcLease" [ 1181.023817] env[62522]: } to be ready. {{(pid=62522) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1181.030438] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1181.030438] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529db591-22df-f2d9-94d6-dc6d2aaa459d" [ 1181.030438] env[62522]: _type = "HttpNfcLease" [ 1181.030438] env[62522]: } is initializing. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1181.106645] env[62522]: DEBUG oslo_concurrency.lockutils [req-2e213c09-774f-465c-ad89-b45641636550 req-0eca0031-5486-455c-a3ef-3ff0e2b439ad service nova] Releasing lock "refresh_cache-08d7e318-ea68-4807-a300-ee4a7993647d" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1181.270582] env[62522]: DEBUG nova.network.neutron [-] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1181.306973] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e8a315-bb1d-fe97-a027-e503ee6e41f4, 'name': SearchDatastore_Task, 'duration_secs': 0.009986} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.307302] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1181.307544] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1181.307778] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1181.307927] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.308128] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1181.308390] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0abecb5b-203e-423b-b880-bfc83f0d7fe4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.316881] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1181.317079] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1181.317782] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6388744f-c834-4c40-9848-3e6b8b144388 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.323096] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1181.323096] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521623ce-5bd8-f828-a574-93ae068cc0dc" [ 1181.323096] env[62522]: _type = "Task" [ 1181.323096] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.329965] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521623ce-5bd8-f828-a574-93ae068cc0dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.333426] env[62522]: DEBUG nova.compute.manager [req-1955cf66-9fea-48a7-81fb-a04b98389101 req-da20f001-1af2-45fb-a822-4462ac96fe4e service nova] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Received event network-vif-deleted-55c5c37a-1605-4edb-957e-04160d41ff01 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1181.392533] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance a4cb5c19-9087-4354-9689-a99ae8924dc1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1181.392811] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1181.392923] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1181.503594] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b2568c-4498-4be1-a279-1e3a7e98c381 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.511501] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb392559-b856-4820-9180-0df102f25e69 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.544072] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f1d254a-863b-4d8d-a8a4-85867754b852 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.549709] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1181.549709] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529db591-22df-f2d9-94d6-dc6d2aaa459d" [ 1181.549709] env[62522]: _type = "HttpNfcLease" [ 1181.549709] env[62522]: } is ready. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1181.551682] env[62522]: DEBUG oslo_vmware.rw_handles [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1181.551682] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529db591-22df-f2d9-94d6-dc6d2aaa459d" [ 1181.551682] env[62522]: _type = "HttpNfcLease" [ 1181.551682] env[62522]: }. 
{{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1181.552404] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d41d7f-37ad-4ba3-b3cc-17f440b03e71 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.555503] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85aee7cf-277b-4510-a2eb-59d5cd26a98f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.569705] env[62522]: DEBUG nova.compute.provider_tree [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1181.573281] env[62522]: DEBUG oslo_vmware.rw_handles [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525ee7bd-c299-8184-10d3-6a6737fc011d/disk-0.vmdk from lease info. {{(pid=62522) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1181.573455] env[62522]: DEBUG oslo_vmware.rw_handles [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525ee7bd-c299-8184-10d3-6a6737fc011d/disk-0.vmdk for reading. {{(pid=62522) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1181.660488] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-08147953-4fb3-4e56-910c-326ad5b93572 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.772854] env[62522]: INFO nova.compute.manager [-] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Took 1.26 seconds to deallocate network for instance. [ 1181.834121] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521623ce-5bd8-f828-a574-93ae068cc0dc, 'name': SearchDatastore_Task, 'duration_secs': 0.009476} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.837222] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc5d4377-542a-4bba-957c-14ce1845fa97 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.839828] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1181.839828] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52860597-406e-7193-21de-24fab73eb15c" [ 1181.839828] env[62522]: _type = "Task" [ 1181.839828] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.848934] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52860597-406e-7193-21de-24fab73eb15c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.076150] env[62522]: DEBUG nova.scheduler.client.report [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1182.279349] env[62522]: DEBUG oslo_concurrency.lockutils [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.349806] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52860597-406e-7193-21de-24fab73eb15c, 'name': SearchDatastore_Task, 'duration_secs': 0.009438} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.350704] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1182.350704] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 08d7e318-ea68-4807-a300-ee4a7993647d/08d7e318-ea68-4807-a300-ee4a7993647d.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1182.351669] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6d08804d-8bf9-47d8-b038-758e8d7705b2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.359510] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1182.359510] env[62522]: value = "task-2416304" [ 1182.359510] env[62522]: _type = "Task" [ 1182.359510] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.367910] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416304, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.581699] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62522) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1182.582052] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.232s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1182.582401] env[62522]: DEBUG oslo_concurrency.lockutils [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.862s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1182.584127] env[62522]: INFO nova.compute.claims [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1182.586966] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1182.587132] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Cleaning up deleted instances {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11745}} [ 1182.873238] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416304, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.107832] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] There are 52 instances to clean {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11754}} [ 1183.107832] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 80dd48b7-09fb-4127-af11-b2d52a49ca12] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1183.373964] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416304, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.618858} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.374284] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 08d7e318-ea68-4807-a300-ee4a7993647d/08d7e318-ea68-4807-a300-ee4a7993647d.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1183.374495] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1183.374780] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3f8c05cf-88a5-4b74-aedb-a1e46697134e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.382392] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1183.382392] env[62522]: value = "task-2416306" [ 1183.382392] env[62522]: _type = "Task" [ 1183.382392] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.392377] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416306, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.614694] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 93a2505e-814d-4809-90a9-0bc215406efd] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1183.738545] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-879b20b6-31f6-4624-863c-c12599ddd728 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.746969] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-927c0674-1069-4e22-a220-c25b760fb53f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.777723] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-088dfb9b-5d3f-489e-addd-0918038ff8e3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.785428] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c13d636a-f5d2-4d91-97ef-8e742f3b96f7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.799666] env[62522]: DEBUG nova.compute.provider_tree [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1183.892926] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416306, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095099} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.893315] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1183.894182] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc3d3fdf-0679-4e5e-bdc0-ae697bed9036 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.916620] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 08d7e318-ea68-4807-a300-ee4a7993647d/08d7e318-ea68-4807-a300-ee4a7993647d.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1183.916939] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0cf2af2b-b485-479d-bdb3-cfa1726c9724 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.940151] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1183.940151] env[62522]: value = "task-2416307" [ 1183.940151] env[62522]: _type = "Task" [ 1183.940151] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.950227] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416307, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.121149] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 981a4839-28d0-4d91-88cd-99c1d263ca4d] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1184.302604] env[62522]: DEBUG nova.scheduler.client.report [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1184.452639] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416307, 'name': ReconfigVM_Task, 'duration_secs': 0.320143} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.452919] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 08d7e318-ea68-4807-a300-ee4a7993647d/08d7e318-ea68-4807-a300-ee4a7993647d.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1184.453724] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6e0fce97-d74f-4a92-8393-74c12bfc9837 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.461680] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1184.461680] env[62522]: value = "task-2416308" [ 1184.461680] env[62522]: _type = "Task" [ 1184.461680] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.471934] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416308, 'name': Rename_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.624878] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 92604d35-7e59-45b0-9dce-32e515703936] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1184.808130] env[62522]: DEBUG oslo_concurrency.lockutils [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.226s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1184.808734] env[62522]: DEBUG nova.compute.manager [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1184.811844] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.285s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1184.813344] env[62522]: INFO nova.compute.claims [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1184.972493] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416308, 'name': Rename_Task, 'duration_secs': 0.166739} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.972734] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1184.972983] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-393f4743-03ef-4396-b7f8-84d170ee370b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.981653] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1184.981653] env[62522]: value = "task-2416309" [ 1184.981653] env[62522]: _type = "Task" [ 1184.981653] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.985975] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-80500ddb-0bba-4a92-a3fe-67677dce01ad tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Volume attach. Driver type: vmdk {{(pid=62522) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1184.986241] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-80500ddb-0bba-4a92-a3fe-67677dce01ad tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489847', 'volume_id': 'a0aa294f-c381-417e-981c-8709a38bb633', 'name': 'volume-a0aa294f-c381-417e-981c-8709a38bb633', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f3894644-eb7e-4a6d-9029-4cd30466d6f8', 'attached_at': '', 'detached_at': '', 'volume_id': 'a0aa294f-c381-417e-981c-8709a38bb633', 'serial': 'a0aa294f-c381-417e-981c-8709a38bb633'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1184.987426] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71301f7d-707e-41ba-99ed-835d1a217b76 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.008551] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416309, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.009338] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a014663-254f-4b84-80eb-0e392975aed3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.039333] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-80500ddb-0bba-4a92-a3fe-67677dce01ad tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] volume-a0aa294f-c381-417e-981c-8709a38bb633/volume-a0aa294f-c381-417e-981c-8709a38bb633.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1185.039687] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e9ee73f6-abca-442d-b564-b15ca68eca0a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.059287] env[62522]: DEBUG oslo_vmware.api [None req-80500ddb-0bba-4a92-a3fe-67677dce01ad tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1185.059287] env[62522]: value = "task-2416310" [ 1185.059287] env[62522]: _type = "Task" [ 1185.059287] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.070241] env[62522]: DEBUG oslo_vmware.api [None req-80500ddb-0bba-4a92-a3fe-67677dce01ad tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416310, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.128603] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 5426087f-3dd0-4796-aa46-6020a3bda4f5] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1185.317935] env[62522]: DEBUG nova.compute.utils [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1185.321857] env[62522]: DEBUG nova.compute.manager [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1185.322052] env[62522]: DEBUG nova.network.neutron [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1185.363949] env[62522]: DEBUG nova.policy [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '30322764deb64be28fcba5630b7240d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f93394feaa4f4b61a5d3d670d32ec599', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1185.493020] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416309, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.571470] env[62522]: DEBUG oslo_vmware.api [None req-80500ddb-0bba-4a92-a3fe-67677dce01ad tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416310, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.631835] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: e369d9e1-1345-4038-b5f3-f816fe767a72] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1185.745020] env[62522]: DEBUG nova.network.neutron [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Successfully created port: 33665d0f-b7dd-4d62-86d5-8ccb8f178e97 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1185.823070] env[62522]: DEBUG nova.compute.manager [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1185.956294] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3a9a3c-a1f0-4876-bedc-a13869082d84 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.965790] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-385e6deb-c698-45ea-8913-61fde9994b5a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.999793] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0fc5a6d-cb20-4d5a-98bb-467b6a6babe8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.010841] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b2ffd2-4073-4fe6-8cdf-f71b300b8249 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.014657] env[62522]: DEBUG oslo_vmware.api [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416309, 'name': PowerOnVM_Task, 'duration_secs': 0.783857} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.014927] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1186.015145] env[62522]: INFO nova.compute.manager [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Took 8.24 seconds to spawn the instance on the hypervisor. 
[ 1186.015332] env[62522]: DEBUG nova.compute.manager [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1186.016458] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e359b7e-8a76-498e-aa60-fe7dd06f220e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.027991] env[62522]: DEBUG nova.compute.provider_tree [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1186.070683] env[62522]: DEBUG oslo_vmware.api [None req-80500ddb-0bba-4a92-a3fe-67677dce01ad tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416310, 'name': ReconfigVM_Task, 'duration_secs': 0.551373} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.070991] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-80500ddb-0bba-4a92-a3fe-67677dce01ad tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Reconfigured VM instance instance-00000064 to attach disk [datastore1] volume-a0aa294f-c381-417e-981c-8709a38bb633/volume-a0aa294f-c381-417e-981c-8709a38bb633.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1186.077153] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5aed917-6ee0-4dab-a39f-3f3f4c097816 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.093584] env[62522]: DEBUG oslo_vmware.api [None req-80500ddb-0bba-4a92-a3fe-67677dce01ad tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1186.093584] env[62522]: value = "task-2416311" [ 1186.093584] env[62522]: _type = "Task" [ 1186.093584] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.103366] env[62522]: DEBUG oslo_vmware.api [None req-80500ddb-0bba-4a92-a3fe-67677dce01ad tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416311, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.135338] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 783d9ae7-67f5-4c54-81a7-6715b762afb3] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1186.531984] env[62522]: DEBUG nova.scheduler.client.report [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1186.544964] env[62522]: INFO nova.compute.manager [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Took 14.57 seconds to build instance. [ 1186.604853] env[62522]: DEBUG oslo_vmware.api [None req-80500ddb-0bba-4a92-a3fe-67677dce01ad tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416311, 'name': ReconfigVM_Task, 'duration_secs': 0.199255} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.605130] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-80500ddb-0bba-4a92-a3fe-67677dce01ad tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489847', 'volume_id': 'a0aa294f-c381-417e-981c-8709a38bb633', 'name': 'volume-a0aa294f-c381-417e-981c-8709a38bb633', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f3894644-eb7e-4a6d-9029-4cd30466d6f8', 'attached_at': '', 'detached_at': '', 'volume_id': 'a0aa294f-c381-417e-981c-8709a38bb633', 'serial': 'a0aa294f-c381-417e-981c-8709a38bb633'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1186.638620] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 9337449d-5aff-4170-83ea-42fe2e9d1657] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1186.834876] env[62522]: DEBUG nova.compute.manager [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1186.864011] env[62522]: DEBUG nova.virt.hardware [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1186.864233] env[62522]: DEBUG nova.virt.hardware [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1186.864383] env[62522]: DEBUG nova.virt.hardware [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1186.864565] env[62522]: DEBUG nova.virt.hardware [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1186.864710] env[62522]: DEBUG nova.virt.hardware [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1186.864857] env[62522]: DEBUG nova.virt.hardware [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1186.865071] env[62522]: DEBUG nova.virt.hardware [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1186.865236] env[62522]: DEBUG nova.virt.hardware [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} 
[ 1186.865405] env[62522]: DEBUG nova.virt.hardware [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1186.865640] env[62522]: DEBUG nova.virt.hardware [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1186.865841] env[62522]: DEBUG nova.virt.hardware [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1186.866725] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d53203-037f-405a-a400-e4d4fcc8720c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.875258] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b8b57c3-51d5-494f-960a-5035a9987422 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.037679] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.226s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1187.038235] env[62522]: DEBUG nova.compute.manager [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1187.040921] env[62522]: DEBUG oslo_concurrency.lockutils [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.762s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1187.041165] env[62522]: DEBUG nova.objects.instance [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lazy-loading 'resources' on Instance uuid 892926ef-3044-497c-8fc8-30cd298e4311 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1187.047192] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0f757081-c8b9-4cb9-b538-43885df4f876 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "08d7e318-ea68-4807-a300-ee4a7993647d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.083s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1187.141472] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 522e778b-6e01-4554-a3eb-dd1efa7870de] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1187.354540] env[62522]: DEBUG nova.compute.manager [req-816926b0-33ae-4bae-a6fb-cbab1b409043 req-b77799ea-a46a-450d-9237-a819345855dc service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Received event network-vif-plugged-33665d0f-b7dd-4d62-86d5-8ccb8f178e97 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1187.354771] env[62522]: DEBUG oslo_concurrency.lockutils [req-816926b0-33ae-4bae-a6fb-cbab1b409043 req-b77799ea-a46a-450d-9237-a819345855dc service nova] Acquiring lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1187.355021] env[62522]: DEBUG oslo_concurrency.lockutils [req-816926b0-33ae-4bae-a6fb-cbab1b409043 req-b77799ea-a46a-450d-9237-a819345855dc service nova] Lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1187.355315] env[62522]: DEBUG oslo_concurrency.lockutils [req-816926b0-33ae-4bae-a6fb-cbab1b409043 req-b77799ea-a46a-450d-9237-a819345855dc service nova] Lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1187.355937] env[62522]: DEBUG nova.compute.manager [req-816926b0-33ae-4bae-a6fb-cbab1b409043 req-b77799ea-a46a-450d-9237-a819345855dc service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] No waiting events found dispatching network-vif-plugged-33665d0f-b7dd-4d62-86d5-8ccb8f178e97 {{(pid=62522) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1187.355937] env[62522]: WARNING nova.compute.manager [req-816926b0-33ae-4bae-a6fb-cbab1b409043 req-b77799ea-a46a-450d-9237-a819345855dc service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Received unexpected event network-vif-plugged-33665d0f-b7dd-4d62-86d5-8ccb8f178e97 for instance with vm_state building and task_state spawning. [ 1187.493729] env[62522]: DEBUG nova.network.neutron [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Successfully updated port: 33665d0f-b7dd-4d62-86d5-8ccb8f178e97 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1187.543728] env[62522]: DEBUG nova.compute.utils [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1187.545206] env[62522]: DEBUG nova.compute.manager [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1187.545401] env[62522]: DEBUG nova.network.neutron [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1187.602429] env[62522]: DEBUG nova.policy [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'edec975faaef4f2ba31aa0de30590522', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fa792663b4ac41b7bf4c5e4b290f9b86', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1187.650577] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 1a5a235a-477f-4da5-b5c1-ee057211cce8] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1187.654139] env[62522]: DEBUG nova.objects.instance [None req-80500ddb-0bba-4a92-a3fe-67677dce01ad tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lazy-loading 'flavor' on Instance uuid f3894644-eb7e-4a6d-9029-4cd30466d6f8 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1187.691333] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-398eb2ef-7c12-42bf-bf35-06ab81d17e17 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.703418] env[62522]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa34afc-3825-499d-ac7d-c1424323b553 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.737157] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d68d35b-eb93-4b05-95f9-e94de184522f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.743341] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb9688e-ba41-4181-8b03-5da1e7e5bd48 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.757555] env[62522]: DEBUG nova.compute.provider_tree [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1187.890378] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a0055793-1b32-4bd4-b6f6-c2b540553fde tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "08d7e318-ea68-4807-a300-ee4a7993647d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1187.890378] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a0055793-1b32-4bd4-b6f6-c2b540553fde tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "08d7e318-ea68-4807-a300-ee4a7993647d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1187.890378] env[62522]: DEBUG nova.compute.manager [None req-a0055793-1b32-4bd4-b6f6-c2b540553fde tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1187.891529] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42eb8463-5903-4f7e-a79f-302ac369d581 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.898095] env[62522]: DEBUG nova.compute.manager [None req-a0055793-1b32-4bd4-b6f6-c2b540553fde tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62522) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1187.898633] env[62522]: DEBUG nova.objects.instance [None req-a0055793-1b32-4bd4-b6f6-c2b540553fde tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lazy-loading 'flavor' on Instance uuid 08d7e318-ea68-4807-a300-ee4a7993647d {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1187.998554] env[62522]: DEBUG oslo_concurrency.lockutils [None 
req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "refresh_cache-cb7a19f1-6093-47ee-bbbc-a75dd5423f32" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1187.998554] env[62522]: DEBUG oslo_concurrency.lockutils [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquired lock "refresh_cache-cb7a19f1-6093-47ee-bbbc-a75dd5423f32" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.998554] env[62522]: DEBUG nova.network.neutron [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1188.000401] env[62522]: DEBUG nova.network.neutron [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Successfully created port: fb503ded-334f-4a04-b774-61284edf466f {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1188.052116] env[62522]: DEBUG nova.compute.manager [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1188.159113] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 5c9b1120-84ad-48d5-8cd4-0cf387963066] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1188.161125] env[62522]: DEBUG oslo_concurrency.lockutils [None req-80500ddb-0bba-4a92-a3fe-67677dce01ad tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.777s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1188.260988] env[62522]: DEBUG nova.scheduler.client.report [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1188.538343] env[62522]: DEBUG nova.network.neutron [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1188.554051] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c21d9b3d-fb6f-4fa2-b00b-917659867bb2 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1188.554343] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c21d9b3d-fb6f-4fa2-b00b-917659867bb2 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1188.663073] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 3b2cd0b6-0c7a-411c-a7f5-64835f2179dc] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1188.757576] env[62522]: DEBUG oslo_vmware.rw_handles [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525ee7bd-c299-8184-10d3-6a6737fc011d/disk-0.vmdk. 
{{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1188.758671] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27229a0d-672f-47d7-a8ed-7fb04254a544 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.762313] env[62522]: DEBUG nova.network.neutron [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Updating instance_info_cache with network_info: [{"id": "33665d0f-b7dd-4d62-86d5-8ccb8f178e97", "address": "fa:16:3e:1d:d3:51", "network": {"id": "949f3536-8a7e-4edf-b6cc-6a264fe5fe83", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1891232839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f93394feaa4f4b61a5d3d670d32ec599", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33665d0f-b7", "ovs_interfaceid": "33665d0f-b7dd-4d62-86d5-8ccb8f178e97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.765545] env[62522]: DEBUG oslo_concurrency.lockutils [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.725s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1188.769454] env[62522]: DEBUG oslo_vmware.rw_handles [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525ee7bd-c299-8184-10d3-6a6737fc011d/disk-0.vmdk is in state: ready. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1188.769761] env[62522]: ERROR oslo_vmware.rw_handles [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525ee7bd-c299-8184-10d3-6a6737fc011d/disk-0.vmdk due to incomplete transfer. 
[ 1188.769890] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d0070c05-4d65-4856-85c5-890483847914 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.779035] env[62522]: DEBUG oslo_vmware.rw_handles [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525ee7bd-c299-8184-10d3-6a6737fc011d/disk-0.vmdk. {{(pid=62522) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1188.779361] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Uploaded image d64c1d73-9bd0-47b2-8b2e-f26e09a3b34f to the Glance image server {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1188.781678] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Destroying the VM {{(pid=62522) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1188.782187] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ef29d066-402a-4942-bdef-73dcf5923d44 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.788973] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1188.788973] env[62522]: value = "task-2416312" [ 1188.788973] env[62522]: _type = "Task" [ 1188.788973] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.793801] env[62522]: INFO nova.scheduler.client.report [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Deleted allocations for instance 892926ef-3044-497c-8fc8-30cd298e4311 [ 1188.800415] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416312, 'name': Destroy_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.905314] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0055793-1b32-4bd4-b6f6-c2b540553fde tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1188.905568] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51277738-a8a6-4196-be05-0abb963e3fc4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.913682] env[62522]: DEBUG oslo_vmware.api [None req-a0055793-1b32-4bd4-b6f6-c2b540553fde tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1188.913682] env[62522]: value = "task-2416313" [ 1188.913682] env[62522]: _type = "Task" [ 1188.913682] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.924141] env[62522]: DEBUG oslo_vmware.api [None req-a0055793-1b32-4bd4-b6f6-c2b540553fde tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416313, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.057885] env[62522]: DEBUG nova.compute.utils [None req-c21d9b3d-fb6f-4fa2-b00b-917659867bb2 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1189.070636] env[62522]: DEBUG nova.compute.manager [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1189.093883] env[62522]: DEBUG nova.virt.hardware [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1189.094213] env[62522]: DEBUG nova.virt.hardware [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1189.094433] env[62522]: DEBUG nova.virt.hardware [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1189.094681] env[62522]: DEBUG nova.virt.hardware [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1189.094814] env[62522]: DEBUG nova.virt.hardware [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1189.094973] env[62522]: DEBUG nova.virt.hardware [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1189.095239] env[62522]: DEBUG nova.virt.hardware [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1189.095460] env[62522]: DEBUG nova.virt.hardware [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1189.095681] 
env[62522]: DEBUG nova.virt.hardware [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1189.095877] env[62522]: DEBUG nova.virt.hardware [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1189.096102] env[62522]: DEBUG nova.virt.hardware [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1189.097050] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23946358-c80e-47e8-9f0b-5bb5946f5c61 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.105583] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f02b52c8-20ab-4f1c-b966-d05ab48b6691 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.166259] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: cabe40a0-8bd0-4d77-b949-298bd194fa42] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1189.264638] env[62522]: DEBUG oslo_concurrency.lockutils [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Releasing lock "refresh_cache-cb7a19f1-6093-47ee-bbbc-a75dd5423f32" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1189.265116] env[62522]: DEBUG nova.compute.manager [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Instance network_info: |[{"id": "33665d0f-b7dd-4d62-86d5-8ccb8f178e97", "address": "fa:16:3e:1d:d3:51", "network": {"id": "949f3536-8a7e-4edf-b6cc-6a264fe5fe83", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1891232839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f93394feaa4f4b61a5d3d670d32ec599", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33665d0f-b7", "ovs_interfaceid": "33665d0f-b7dd-4d62-86d5-8ccb8f178e97", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1189.265644] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:d3:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '983826cf-6390-4ec6-bf97-30a1060947fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '33665d0f-b7dd-4d62-86d5-8ccb8f178e97', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1189.273385] env[62522]: DEBUG oslo.service.loopingcall [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1189.273630] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1189.273882] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-62e785dd-b28f-45e7-a0d5-bc2dfab0c6db {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.298560] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416312, 'name': Destroy_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.299838] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1189.299838] env[62522]: value = "task-2416314" [ 1189.299838] env[62522]: _type = "Task" [ 1189.299838] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.305663] env[62522]: DEBUG oslo_concurrency.lockutils [None req-abf37b33-567b-4456-9cec-46652c86ccdc tempest-AttachInterfacesTestJSON-1457883008 tempest-AttachInterfacesTestJSON-1457883008-project-member] Lock "892926ef-3044-497c-8fc8-30cd298e4311" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.418s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1189.309718] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416314, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.380643] env[62522]: DEBUG nova.compute.manager [req-a2c3f675-fc6b-444d-9b6e-294e18803c2d req-366ef7b7-d656-4e09-9697-9dac50188db9 service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Received event network-changed-33665d0f-b7dd-4d62-86d5-8ccb8f178e97 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1189.380971] env[62522]: DEBUG nova.compute.manager [req-a2c3f675-fc6b-444d-9b6e-294e18803c2d req-366ef7b7-d656-4e09-9697-9dac50188db9 service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Refreshing instance network info cache due to event network-changed-33665d0f-b7dd-4d62-86d5-8ccb8f178e97. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1189.381368] env[62522]: DEBUG oslo_concurrency.lockutils [req-a2c3f675-fc6b-444d-9b6e-294e18803c2d req-366ef7b7-d656-4e09-9697-9dac50188db9 service nova] Acquiring lock "refresh_cache-cb7a19f1-6093-47ee-bbbc-a75dd5423f32" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1189.381541] env[62522]: DEBUG oslo_concurrency.lockutils [req-a2c3f675-fc6b-444d-9b6e-294e18803c2d req-366ef7b7-d656-4e09-9697-9dac50188db9 service nova] Acquired lock "refresh_cache-cb7a19f1-6093-47ee-bbbc-a75dd5423f32" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.381714] env[62522]: DEBUG nova.network.neutron [req-a2c3f675-fc6b-444d-9b6e-294e18803c2d req-366ef7b7-d656-4e09-9697-9dac50188db9 service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Refreshing network info cache for port 33665d0f-b7dd-4d62-86d5-8ccb8f178e97 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1189.423868] env[62522]: DEBUG oslo_vmware.api [None req-a0055793-1b32-4bd4-b6f6-c2b540553fde tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416313, 'name': PowerOffVM_Task, 'duration_secs': 0.355623} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.424187] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0055793-1b32-4bd4-b6f6-c2b540553fde tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1189.424390] env[62522]: DEBUG nova.compute.manager [None req-a0055793-1b32-4bd4-b6f6-c2b540553fde tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1189.425166] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d7597f2-0464-4a53-917b-bd5454286d9f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.560760] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c21d9b3d-fb6f-4fa2-b00b-917659867bb2 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1189.648708] env[62522]: DEBUG nova.network.neutron [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Successfully updated port: fb503ded-334f-4a04-b774-61284edf466f {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1189.673575] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 97f4c6ab-04de-4069-8ce0-1509c30ffb0f] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1189.799338] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416312, 'name': Destroy_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.808247] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416314, 'name': CreateVM_Task, 'duration_secs': 0.431094} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.808421] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1189.809093] env[62522]: DEBUG oslo_concurrency.lockutils [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1189.809258] env[62522]: DEBUG oslo_concurrency.lockutils [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.809575] env[62522]: DEBUG oslo_concurrency.lockutils [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1189.809824] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93d466c8-b913-4bf3-b196-c26248bb4791 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.813930] env[62522]: DEBUG oslo_vmware.api [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1189.813930] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5243c4b9-f422-14d1-03ee-d1b791a78857" [ 1189.813930] env[62522]: _type = "Task" [ 1189.813930] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.824522] env[62522]: DEBUG oslo_vmware.api [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5243c4b9-f422-14d1-03ee-d1b791a78857, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.938046] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a0055793-1b32-4bd4-b6f6-c2b540553fde tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "08d7e318-ea68-4807-a300-ee4a7993647d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.048s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1190.117095] env[62522]: DEBUG nova.network.neutron [req-a2c3f675-fc6b-444d-9b6e-294e18803c2d req-366ef7b7-d656-4e09-9697-9dac50188db9 service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Updated VIF entry in instance network info cache for port 33665d0f-b7dd-4d62-86d5-8ccb8f178e97. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1190.117474] env[62522]: DEBUG nova.network.neutron [req-a2c3f675-fc6b-444d-9b6e-294e18803c2d req-366ef7b7-d656-4e09-9697-9dac50188db9 service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Updating instance_info_cache with network_info: [{"id": "33665d0f-b7dd-4d62-86d5-8ccb8f178e97", "address": "fa:16:3e:1d:d3:51", "network": {"id": "949f3536-8a7e-4edf-b6cc-6a264fe5fe83", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1891232839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f93394feaa4f4b61a5d3d670d32ec599", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33665d0f-b7", "ovs_interfaceid": "33665d0f-b7dd-4d62-86d5-8ccb8f178e97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1190.152654] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "refresh_cache-a4cb5c19-9087-4354-9689-a99ae8924dc1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1190.152825] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquired lock "refresh_cache-a4cb5c19-9087-4354-9689-a99ae8924dc1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.152977] env[62522]: DEBUG nova.network.neutron [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Building network info cache for instance {{(pid=62522) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1190.176569] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 4e9436df-c86b-429b-abc2-97f760858055] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1190.299613] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416312, 'name': Destroy_Task, 'duration_secs': 1.355463} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.299872] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Destroyed the VM [ 1190.300171] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Deleting Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1190.300757] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-63d28a6e-4b57-4792-aa7b-5b76825bf1f0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.306169] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1190.306169] env[62522]: value = "task-2416315" [ 1190.306169] env[62522]: _type = "Task" [ 1190.306169] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.313784] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416315, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.322902] env[62522]: DEBUG oslo_vmware.api [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5243c4b9-f422-14d1-03ee-d1b791a78857, 'name': SearchDatastore_Task, 'duration_secs': 0.009089} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.323241] env[62522]: DEBUG oslo_concurrency.lockutils [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1190.323484] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1190.323712] env[62522]: DEBUG oslo_concurrency.lockutils [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1190.323857] env[62522]: DEBUG oslo_concurrency.lockutils [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.324047] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1190.324300] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-088bf771-21f4-43e0-8fc9-d592dd855fe2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.331740] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1190.331903] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1190.332585] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce454baf-ee1d-4814-8d28-043ffb75eb01 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.337415] env[62522]: DEBUG oslo_vmware.api [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1190.337415] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5275f64e-3fcb-add1-7ec7-0b428bfea1d1" [ 1190.337415] env[62522]: _type = "Task" [ 1190.337415] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.345494] env[62522]: DEBUG oslo_vmware.api [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5275f64e-3fcb-add1-7ec7-0b428bfea1d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.625584] env[62522]: DEBUG oslo_concurrency.lockutils [req-a2c3f675-fc6b-444d-9b6e-294e18803c2d req-366ef7b7-d656-4e09-9697-9dac50188db9 service nova] Releasing lock "refresh_cache-cb7a19f1-6093-47ee-bbbc-a75dd5423f32" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1190.629060] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c21d9b3d-fb6f-4fa2-b00b-917659867bb2 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.629060] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c21d9b3d-fb6f-4fa2-b00b-917659867bb2 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1190.629060] env[62522]: INFO nova.compute.manager [None req-c21d9b3d-fb6f-4fa2-b00b-917659867bb2 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Attaching volume 894e4909-b283-41ec-bddf-7ed9bff284d1 to /dev/sdc [ 1190.630748] env[62522]: INFO nova.compute.manager [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Rebuilding instance [ 1190.670731] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4c1b37-d42a-407f-88ad-965ac142fed9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.678094] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-11c9dd38-a32d-4845-8878-5306000596d3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.683884] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 921c14c9-27fa-4eda-9831-6263ad0d6c57] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1190.688455] env[62522]: DEBUG nova.compute.manager [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1190.689231] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2060ea1d-fae1-48df-9849-29feb1441fcb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.694227] env[62522]: DEBUG nova.network.neutron [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1190.701457] env[62522]: DEBUG nova.virt.block_device [None req-c21d9b3d-fb6f-4fa2-b00b-917659867bb2 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Updating existing volume attachment record: ccb8c9bc-c4ab-4164-8b18-9bdecfc49450 {{(pid=62522) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1190.817797] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416315, 'name': RemoveSnapshot_Task, 'duration_secs': 0.358072} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.820361] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Deleted Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1190.820638] env[62522]: DEBUG nova.compute.manager [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1190.821454] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbdc1de9-92f0-4131-9767-4393382c94af {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.854995] env[62522]: DEBUG oslo_vmware.api [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5275f64e-3fcb-add1-7ec7-0b428bfea1d1, 'name': SearchDatastore_Task, 'duration_secs': 0.008808} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.854995] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad1f9096-11a2-4c7b-adbf-3febc08ff4ea {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.854995] env[62522]: DEBUG oslo_vmware.api [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1190.854995] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520448b9-ed67-8d2e-78a8-4215f4fdbf70" [ 1190.854995] env[62522]: _type = "Task" [ 1190.854995] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.861962] env[62522]: DEBUG oslo_vmware.api [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520448b9-ed67-8d2e-78a8-4215f4fdbf70, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.885561] env[62522]: DEBUG nova.network.neutron [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Updating instance_info_cache with network_info: [{"id": "fb503ded-334f-4a04-b774-61284edf466f", "address": "fa:16:3e:0d:45:12", "network": {"id": "2c9c537f-91b6-4217-8eaf-dc187f4ce7d5", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1154766161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fa792663b4ac41b7bf4c5e4b290f9b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb503ded-33", "ovs_interfaceid": "fb503ded-334f-4a04-b774-61284edf466f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.189442] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 4e27a87c-4891-4e69-a6fa-312b026bf11e] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1191.333819] env[62522]: INFO nova.compute.manager [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Shelve offloading [ 1191.366691] env[62522]: DEBUG oslo_vmware.api [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520448b9-ed67-8d2e-78a8-4215f4fdbf70, 'name': SearchDatastore_Task, 'duration_secs': 0.009204} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.370018] env[62522]: DEBUG oslo_concurrency.lockutils [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1191.370018] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] cb7a19f1-6093-47ee-bbbc-a75dd5423f32/cb7a19f1-6093-47ee-bbbc-a75dd5423f32.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1191.370018] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4f4a0063-4f50-41b8-9951-97a9b64a195a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.373825] env[62522]: DEBUG oslo_vmware.api [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1191.373825] env[62522]: value = "task-2416317" [ 1191.373825] env[62522]: _type = "Task" [ 1191.373825] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.382704] env[62522]: DEBUG oslo_vmware.api [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416317, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.387873] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Releasing lock "refresh_cache-a4cb5c19-9087-4354-9689-a99ae8924dc1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1191.388738] env[62522]: DEBUG nova.compute.manager [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Instance network_info: |[{"id": "fb503ded-334f-4a04-b774-61284edf466f", "address": "fa:16:3e:0d:45:12", "network": {"id": "2c9c537f-91b6-4217-8eaf-dc187f4ce7d5", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1154766161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fa792663b4ac41b7bf4c5e4b290f9b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb503ded-33", "ovs_interfaceid": "fb503ded-334f-4a04-b774-61284edf466f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1191.389195] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0d:45:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fb503ded-334f-4a04-b774-61284edf466f', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1191.398731] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Creating folder: Project (fa792663b4ac41b7bf4c5e4b290f9b86). Parent ref: group-v489562. 
{{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1191.399042] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5b205905-f34f-475f-b4f5-4481582fe2c8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.409737] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Created folder: Project (fa792663b4ac41b7bf4c5e4b290f9b86) in parent group-v489562. [ 1191.409737] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Creating folder: Instances. Parent ref: group-v489850. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1191.409737] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1dab3df4-d673-4829-9feb-b1946bdb89cb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.417950] env[62522]: DEBUG nova.compute.manager [req-36cb5bf0-9389-4542-b872-959521d34b0d req-d91e588b-85f2-4e11-91ad-cf7dce98ab90 service nova] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Received event network-vif-plugged-fb503ded-334f-4a04-b774-61284edf466f {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1191.418210] env[62522]: DEBUG oslo_concurrency.lockutils [req-36cb5bf0-9389-4542-b872-959521d34b0d req-d91e588b-85f2-4e11-91ad-cf7dce98ab90 service nova] Acquiring lock "a4cb5c19-9087-4354-9689-a99ae8924dc1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1191.418427] env[62522]: DEBUG oslo_concurrency.lockutils [req-36cb5bf0-9389-4542-b872-959521d34b0d req-d91e588b-85f2-4e11-91ad-cf7dce98ab90 service nova] Lock "a4cb5c19-9087-4354-9689-a99ae8924dc1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1191.418594] env[62522]: DEBUG oslo_concurrency.lockutils [req-36cb5bf0-9389-4542-b872-959521d34b0d req-d91e588b-85f2-4e11-91ad-cf7dce98ab90 service nova] Lock "a4cb5c19-9087-4354-9689-a99ae8924dc1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1191.418765] env[62522]: DEBUG nova.compute.manager [req-36cb5bf0-9389-4542-b872-959521d34b0d req-d91e588b-85f2-4e11-91ad-cf7dce98ab90 service nova] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] No waiting events found dispatching network-vif-plugged-fb503ded-334f-4a04-b774-61284edf466f {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1191.418930] env[62522]: WARNING nova.compute.manager [req-36cb5bf0-9389-4542-b872-959521d34b0d req-d91e588b-85f2-4e11-91ad-cf7dce98ab90 service nova] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Received unexpected event network-vif-plugged-fb503ded-334f-4a04-b774-61284edf466f for instance with vm_state building and task_state spawning. 
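(Editor's note, not part of the captured log.) The surrounding records repeat one pattern for every datastore operation: acquire an oslo.concurrency lock on the cached image path, start a vCenter task such as SearchDatastore_Task or CopyVirtualDisk_Task, and poll it ("progress is 0%" ... "completed successfully") before releasing the lock. The sketch below is illustrative only and is not the Nova/oslo.vmware implementation; fake_search_task() and the 0.5-second poll interval are assumptions made for the example.

    # Illustrative sketch of the lock-then-poll pattern visible in the log above.
    # Assumption: fake_search_task() stands in for HostDatastoreBrowser.SearchDatastore_Task,
    # and the simple loop stands in for the oslo.vmware task-polling the log reports.
    import time
    from oslo_concurrency import lockutils  # same lock helper the log records reference

    CACHE_LOCK = "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290"

    def fake_search_task():
        """Yield progress values the way a vCenter task reports them."""
        yield 0      # corresponds to "progress is 0%"
        yield 100    # task finished

    def wait_for_task(task, poll_interval=0.5):
        """Poll until the task reports completion, mirroring the log messages."""
        for progress in task:
            print(f"SearchDatastore_Task progress is {progress}%")
            if progress < 100:
                time.sleep(poll_interval)
        print("SearchDatastore_Task completed successfully")

    with lockutils.lock(CACHE_LOCK):   # "Acquiring"/"Acquired"/"Releasing lock" in the log
        wait_for_task(fake_search_task())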
[ 1191.419105] env[62522]: DEBUG nova.compute.manager [req-36cb5bf0-9389-4542-b872-959521d34b0d req-d91e588b-85f2-4e11-91ad-cf7dce98ab90 service nova] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Received event network-changed-fb503ded-334f-4a04-b774-61284edf466f {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1191.419262] env[62522]: DEBUG nova.compute.manager [req-36cb5bf0-9389-4542-b872-959521d34b0d req-d91e588b-85f2-4e11-91ad-cf7dce98ab90 service nova] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Refreshing instance network info cache due to event network-changed-fb503ded-334f-4a04-b774-61284edf466f. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1191.419444] env[62522]: DEBUG oslo_concurrency.lockutils [req-36cb5bf0-9389-4542-b872-959521d34b0d req-d91e588b-85f2-4e11-91ad-cf7dce98ab90 service nova] Acquiring lock "refresh_cache-a4cb5c19-9087-4354-9689-a99ae8924dc1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1191.419579] env[62522]: DEBUG oslo_concurrency.lockutils [req-36cb5bf0-9389-4542-b872-959521d34b0d req-d91e588b-85f2-4e11-91ad-cf7dce98ab90 service nova] Acquired lock "refresh_cache-a4cb5c19-9087-4354-9689-a99ae8924dc1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.419733] env[62522]: DEBUG nova.network.neutron [req-36cb5bf0-9389-4542-b872-959521d34b0d req-d91e588b-85f2-4e11-91ad-cf7dce98ab90 service nova] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Refreshing network info cache for port fb503ded-334f-4a04-b774-61284edf466f {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1191.423443] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Created folder: Instances in parent group-v489850. [ 1191.423702] env[62522]: DEBUG oslo.service.loopingcall [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1191.424049] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1191.424282] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94732acd-375b-4f5c-b01d-d4bdcf10f715 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.451309] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1191.451309] env[62522]: value = "task-2416320" [ 1191.451309] env[62522]: _type = "Task" [ 1191.451309] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.463023] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416320, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.692870] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 02708991-7f71-408e-89d8-932b845553d1] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1191.710203] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1191.710516] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9bbda01d-3f21-4e54-948c-003df652c127 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.717296] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1191.717296] env[62522]: value = "task-2416321" [ 1191.717296] env[62522]: _type = "Task" [ 1191.717296] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.727715] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] VM already powered off {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1191.727995] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1191.728801] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4472f23-7a78-4c7e-9b77-99afb8f862cc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.736366] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1191.736717] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac6e8cac-a3e2-4716-a06d-a5afe859eda4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.806882] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1191.807221] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f 
tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1191.807415] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Deleting the datastore file [datastore1] 08d7e318-ea68-4807-a300-ee4a7993647d {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1191.807595] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4df512d0-b106-4b5f-8a62-d81e5eb668a8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.814845] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1191.814845] env[62522]: value = "task-2416323" [ 1191.814845] env[62522]: _type = "Task" [ 1191.814845] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.823278] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416323, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.838047] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1191.838400] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3724a0e7-5e4b-4586-be94-f2c1adce119c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.844328] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1191.844328] env[62522]: value = "task-2416324" [ 1191.844328] env[62522]: _type = "Task" [ 1191.844328] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.853211] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416324, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.884131] env[62522]: DEBUG oslo_vmware.api [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416317, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.454808} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.884421] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] cb7a19f1-6093-47ee-bbbc-a75dd5423f32/cb7a19f1-6093-47ee-bbbc-a75dd5423f32.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1191.884671] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1191.884946] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0aeeeee8-1a18-4682-a65b-e74c52f9647f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.892520] env[62522]: DEBUG oslo_vmware.api [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1191.892520] env[62522]: value = "task-2416325" [ 1191.892520] env[62522]: _type = "Task" [ 1191.892520] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.901226] env[62522]: DEBUG oslo_vmware.api [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416325, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.966195] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416320, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.183162] env[62522]: DEBUG nova.network.neutron [req-36cb5bf0-9389-4542-b872-959521d34b0d req-d91e588b-85f2-4e11-91ad-cf7dce98ab90 service nova] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Updated VIF entry in instance network info cache for port fb503ded-334f-4a04-b774-61284edf466f. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1192.183613] env[62522]: DEBUG nova.network.neutron [req-36cb5bf0-9389-4542-b872-959521d34b0d req-d91e588b-85f2-4e11-91ad-cf7dce98ab90 service nova] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Updating instance_info_cache with network_info: [{"id": "fb503ded-334f-4a04-b774-61284edf466f", "address": "fa:16:3e:0d:45:12", "network": {"id": "2c9c537f-91b6-4217-8eaf-dc187f4ce7d5", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1154766161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fa792663b4ac41b7bf4c5e4b290f9b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb503ded-33", "ovs_interfaceid": "fb503ded-334f-4a04-b774-61284edf466f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1192.196854] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 895e6716-44cf-45b2-afd8-eaba71c32460] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1192.324831] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416323, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153476} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.325101] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1192.325294] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1192.325468] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1192.354250] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] VM already powered off {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1192.354490] env[62522]: DEBUG nova.compute.manager [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1192.355235] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19b3372-a3b3-4826-8016-7eb61179fbf4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.360477] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "refresh_cache-24cf2f15-6f6a-4ded-b2fb-85093fddbf2b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1192.360631] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired lock "refresh_cache-24cf2f15-6f6a-4ded-b2fb-85093fddbf2b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.360799] env[62522]: DEBUG nova.network.neutron [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1192.400811] env[62522]: DEBUG oslo_vmware.api [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416325, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069516} completed 
successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.401116] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1192.401915] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a06b90b-cb47-4503-9e71-bfa5ba2452ab {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.425012] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] cb7a19f1-6093-47ee-bbbc-a75dd5423f32/cb7a19f1-6093-47ee-bbbc-a75dd5423f32.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1192.425312] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e186d4a-3ab0-40b4-be90-68ccac4dee42 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.444417] env[62522]: DEBUG oslo_vmware.api [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1192.444417] env[62522]: value = "task-2416326" [ 1192.444417] env[62522]: _type = "Task" [ 1192.444417] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.451978] env[62522]: DEBUG oslo_vmware.api [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416326, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.462561] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416320, 'name': CreateVM_Task, 'duration_secs': 0.53908} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.462561] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1192.462999] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1192.463524] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.463647] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1192.463927] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b60d1b85-ee17-41c0-92d9-43fb887d6aa2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.468206] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1192.468206] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5278ae89-4f6e-24fe-7c28-f589fcedd586" [ 1192.468206] env[62522]: _type = "Task" [ 1192.468206] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.476134] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5278ae89-4f6e-24fe-7c28-f589fcedd586, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.687728] env[62522]: DEBUG oslo_concurrency.lockutils [req-36cb5bf0-9389-4542-b872-959521d34b0d req-d91e588b-85f2-4e11-91ad-cf7dce98ab90 service nova] Releasing lock "refresh_cache-a4cb5c19-9087-4354-9689-a99ae8924dc1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1192.699659] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 548364e9-b19a-4777-8e62-19b8a0594f36] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1192.956741] env[62522]: DEBUG oslo_vmware.api [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416326, 'name': ReconfigVM_Task, 'duration_secs': 0.279836} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.957037] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Reconfigured VM instance instance-00000069 to attach disk [datastore2] cb7a19f1-6093-47ee-bbbc-a75dd5423f32/cb7a19f1-6093-47ee-bbbc-a75dd5423f32.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1192.957656] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3edc71cc-3d6a-477a-a1cf-30d962d3d3d4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.964727] env[62522]: DEBUG oslo_vmware.api [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1192.964727] env[62522]: value = "task-2416328" [ 1192.964727] env[62522]: _type = "Task" [ 1192.964727] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.974449] env[62522]: DEBUG oslo_vmware.api [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416328, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.981748] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5278ae89-4f6e-24fe-7c28-f589fcedd586, 'name': SearchDatastore_Task, 'duration_secs': 0.012695} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.982034] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1192.982270] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1192.982502] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1192.982645] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.982821] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1192.983083] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a58e1b01-a48e-4d90-b817-b075d7a5d837 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.992984] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1192.993217] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1192.993924] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-762077dd-0c2a-454b-b862-eb0cb26f1046 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.999244] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1192.999244] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]523d18e3-c1f2-d14e-c4f5-36aa7bf7a5ae" [ 1192.999244] env[62522]: _type = "Task" [ 1192.999244] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.008740] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]523d18e3-c1f2-d14e-c4f5-36aa7bf7a5ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.077958] env[62522]: DEBUG nova.network.neutron [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Updating instance_info_cache with network_info: [{"id": "d25e13a4-7bac-4701-afa0-5fdd63ad7f3c", "address": "fa:16:3e:02:12:f6", "network": {"id": "21d0ca22-5509-4f9b-b167-f9fde2dac808", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-547933771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a06421250694a98b13ff34ad816dc75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd25e13a4-7b", "ovs_interfaceid": "d25e13a4-7bac-4701-afa0-5fdd63ad7f3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1193.203734] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: ec2d78cf-15f9-441b-9800-8fcc513f7774] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1193.361906] env[62522]: DEBUG nova.virt.hardware [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1193.362167] env[62522]: DEBUG nova.virt.hardware [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1193.362325] env[62522]: DEBUG nova.virt.hardware [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1193.362505] env[62522]: DEBUG nova.virt.hardware [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1193.362698] env[62522]: DEBUG nova.virt.hardware [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1193.362791] env[62522]: DEBUG nova.virt.hardware [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1193.362996] env[62522]: DEBUG nova.virt.hardware [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1193.363170] env[62522]: DEBUG nova.virt.hardware [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1193.363359] env[62522]: DEBUG nova.virt.hardware [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1193.363573] env[62522]: DEBUG nova.virt.hardware [None 
req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1193.363696] env[62522]: DEBUG nova.virt.hardware [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1193.364561] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8b119d1-6350-4a0d-ab12-c8cab3645bde {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.372460] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d1bf64-b1f2-4953-8e95-b000a0712aec {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.386208] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:9f:11', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd7b5f1ef-d4b9-4ec3-b047-17e4cb349d25', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2eb2d7ef-0c29-4fda-947c-b02485470817', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1193.393659] env[62522]: DEBUG oslo.service.loopingcall [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1193.394024] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1193.394217] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5f00b04d-73f6-4177-8232-d85c0d075590 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.412847] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1193.412847] env[62522]: value = "task-2416329" [ 1193.412847] env[62522]: _type = "Task" [ 1193.412847] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.420398] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416329, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.477141] env[62522]: DEBUG oslo_vmware.api [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416328, 'name': Rename_Task, 'duration_secs': 0.180503} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.477432] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1193.477688] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0e6ae00f-e8e9-4685-bd3c-cb4c381245c3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.484984] env[62522]: DEBUG oslo_vmware.api [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1193.484984] env[62522]: value = "task-2416330" [ 1193.484984] env[62522]: _type = "Task" [ 1193.484984] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.494319] env[62522]: DEBUG oslo_vmware.api [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416330, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.507889] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]523d18e3-c1f2-d14e-c4f5-36aa7bf7a5ae, 'name': SearchDatastore_Task, 'duration_secs': 0.014348} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.508663] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ebe0ed5-4f6d-43de-9ebe-4bdb553be0c4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.514019] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1193.514019] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5221cde4-a9bc-56e0-d4af-71dd1550d420" [ 1193.514019] env[62522]: _type = "Task" [ 1193.514019] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.522152] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5221cde4-a9bc-56e0-d4af-71dd1550d420, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.584203] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Releasing lock "refresh_cache-24cf2f15-6f6a-4ded-b2fb-85093fddbf2b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1193.708812] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 7f8a8270-5014-446c-aa42-ea0b4079e5a9] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1193.929582] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416329, 'name': CreateVM_Task, 'duration_secs': 0.365688} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.929798] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1193.930491] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1193.930656] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1193.930975] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1193.931337] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d71c336-e7a5-4de9-8573-326dd9714f3c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.940033] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1193.940033] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52991189-2c7f-3b46-c06c-37074d75523d" [ 1193.940033] env[62522]: _type = "Task" [ 1193.940033] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.954789] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52991189-2c7f-3b46-c06c-37074d75523d, 'name': SearchDatastore_Task, 'duration_secs': 0.009675} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.954789] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1193.954789] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1193.955024] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1193.955194] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1193.955303] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1193.955747] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d856b984-83d3-436e-bdbe-b6e69f1bf1b9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.964595] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1193.964803] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1193.965521] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47a17cfc-1b2f-4ed6-820b-00d21b1db8a1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.972020] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1193.972020] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5211de9c-5a93-fdc6-5217-d97d3ebacf88" [ 1193.972020] env[62522]: _type = "Task" [ 1193.972020] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.980076] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5211de9c-5a93-fdc6-5217-d97d3ebacf88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.997176] env[62522]: DEBUG oslo_vmware.api [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416330, 'name': PowerOnVM_Task, 'duration_secs': 0.506907} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.997546] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1193.997672] env[62522]: INFO nova.compute.manager [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Took 7.16 seconds to spawn the instance on the hypervisor. [ 1193.997847] env[62522]: DEBUG nova.compute.manager [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1193.998636] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c43d2d-ad62-44c9-b6f9-494cb6d05914 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.023403] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5221cde4-a9bc-56e0-d4af-71dd1550d420, 'name': SearchDatastore_Task, 'duration_secs': 0.010192} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.023804] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1194.023928] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] a4cb5c19-9087-4354-9689-a99ae8924dc1/a4cb5c19-9087-4354-9689-a99ae8924dc1.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1194.024258] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d04e39aa-89b0-4f6f-9c5b-257248506c5d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.031208] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1194.031208] env[62522]: value = "task-2416331" [ 1194.031208] env[62522]: _type = "Task" [ 1194.031208] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.039244] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416331, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.067595] env[62522]: DEBUG nova.compute.manager [req-a89c7edc-db36-49a4-8abb-64d975c6ca76 req-f5e70757-4013-4927-a37c-4588852497d4 service nova] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Received event network-vif-unplugged-d25e13a4-7bac-4701-afa0-5fdd63ad7f3c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1194.067866] env[62522]: DEBUG oslo_concurrency.lockutils [req-a89c7edc-db36-49a4-8abb-64d975c6ca76 req-f5e70757-4013-4927-a37c-4588852497d4 service nova] Acquiring lock "24cf2f15-6f6a-4ded-b2fb-85093fddbf2b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.068127] env[62522]: DEBUG oslo_concurrency.lockutils [req-a89c7edc-db36-49a4-8abb-64d975c6ca76 req-f5e70757-4013-4927-a37c-4588852497d4 service nova] Lock "24cf2f15-6f6a-4ded-b2fb-85093fddbf2b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.068251] env[62522]: DEBUG oslo_concurrency.lockutils [req-a89c7edc-db36-49a4-8abb-64d975c6ca76 req-f5e70757-4013-4927-a37c-4588852497d4 service nova] Lock "24cf2f15-6f6a-4ded-b2fb-85093fddbf2b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1194.068420] env[62522]: DEBUG nova.compute.manager [req-a89c7edc-db36-49a4-8abb-64d975c6ca76 req-f5e70757-4013-4927-a37c-4588852497d4 service nova] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] No waiting events found dispatching network-vif-unplugged-d25e13a4-7bac-4701-afa0-5fdd63ad7f3c {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1194.068587] env[62522]: WARNING nova.compute.manager [req-a89c7edc-db36-49a4-8abb-64d975c6ca76 req-f5e70757-4013-4927-a37c-4588852497d4 service nova] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Received unexpected event network-vif-unplugged-d25e13a4-7bac-4701-afa0-5fdd63ad7f3c for instance with vm_state shelved and task_state shelving_offloading. 
[ 1194.094828] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1194.096785] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b383e7d7-a1b3-458c-a4a9-22e4dde2a07c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.104989] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1194.105263] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-070fe787-d729-4419-a8ff-4d8bead1f3f4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.172406] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1194.172620] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1194.172798] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Deleting the datastore file [datastore1] 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1194.173085] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4515d1a3-a98f-4b3a-aab0-4a676fbce253 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.179785] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1194.179785] env[62522]: value = "task-2416333" [ 1194.179785] env[62522]: _type = "Task" [ 1194.179785] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.188199] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416333, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.214225] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: e1225c6f-9025-41ff-94fa-a55af49aeed2] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1194.482528] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5211de9c-5a93-fdc6-5217-d97d3ebacf88, 'name': SearchDatastore_Task, 'duration_secs': 0.008911} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.483495] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1d46b88-7db9-4c9c-9996-27639863a28b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.491660] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1194.491660] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]528913a6-14bd-68d3-23b4-a32093929e74" [ 1194.491660] env[62522]: _type = "Task" [ 1194.491660] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.500097] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]528913a6-14bd-68d3-23b4-a32093929e74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.521386] env[62522]: INFO nova.compute.manager [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Took 16.82 seconds to build instance. [ 1194.549945] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416331, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.45756} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.550442] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] a4cb5c19-9087-4354-9689-a99ae8924dc1/a4cb5c19-9087-4354-9689-a99ae8924dc1.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1194.550809] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1194.551241] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cf0e201b-c8cb-4c86-a77a-60ee4548ec19 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.560214] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1194.560214] env[62522]: value = "task-2416334" [ 1194.560214] env[62522]: _type = "Task" [ 1194.560214] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.573534] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416334, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.689600] env[62522]: DEBUG oslo_vmware.api [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416333, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.298342} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.689811] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1194.690235] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1194.690235] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1194.712300] env[62522]: INFO nova.scheduler.client.report [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Deleted allocations for instance 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b [ 1194.717988] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 7e5fc552-748f-4569-bd61-c81a52bb46b0] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1195.008526] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]528913a6-14bd-68d3-23b4-a32093929e74, 'name': SearchDatastore_Task, 'duration_secs': 0.011171} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.008526] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1195.008526] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 08d7e318-ea68-4807-a300-ee4a7993647d/08d7e318-ea68-4807-a300-ee4a7993647d.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1195.008526] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-673d0562-e4dd-4d10-b767-b1e41aeb8dfb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.016832] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1195.016832] env[62522]: value = "task-2416335" [ 1195.016832] env[62522]: _type = "Task" [ 1195.016832] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.026492] env[62522]: DEBUG oslo_concurrency.lockutils [None req-664a74aa-2912-4cf3-b01b-610a92adee21 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.335s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1195.026793] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416335, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.074119] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416334, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066914} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.074119] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1195.074119] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e152a7-cb85-49d4-9499-3a8f94698fe6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.097880] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] a4cb5c19-9087-4354-9689-a99ae8924dc1/a4cb5c19-9087-4354-9689-a99ae8924dc1.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1195.098229] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe6119d8-3acc-4b9c-b5ef-568ea1aaf6ad {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.119704] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1195.119704] env[62522]: value = "task-2416336" [ 1195.119704] env[62522]: _type = "Task" [ 1195.119704] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.129137] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416336, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.216893] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1195.217190] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1195.217420] env[62522]: DEBUG nova.objects.instance [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lazy-loading 'resources' on Instance uuid 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1195.224572] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: a08dfbb5-2bd0-4c85-8ce6-2e6ec96d0b72] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1195.254862] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-c21d9b3d-fb6f-4fa2-b00b-917659867bb2 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Volume attach. 
Driver type: vmdk {{(pid=62522) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1195.255197] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-c21d9b3d-fb6f-4fa2-b00b-917659867bb2 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489849', 'volume_id': '894e4909-b283-41ec-bddf-7ed9bff284d1', 'name': 'volume-894e4909-b283-41ec-bddf-7ed9bff284d1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f3894644-eb7e-4a6d-9029-4cd30466d6f8', 'attached_at': '', 'detached_at': '', 'volume_id': '894e4909-b283-41ec-bddf-7ed9bff284d1', 'serial': '894e4909-b283-41ec-bddf-7ed9bff284d1'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1195.256377] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e99ca7f-24f1-4a6a-a38e-7df5601ebc5b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.279332] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb6f1d9-1d7a-438b-a889-21b44433457f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.309386] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-c21d9b3d-fb6f-4fa2-b00b-917659867bb2 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] volume-894e4909-b283-41ec-bddf-7ed9bff284d1/volume-894e4909-b283-41ec-bddf-7ed9bff284d1.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1195.309744] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-043fb92c-d998-4d80-8ad9-c40bb784cbbd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.330147] env[62522]: DEBUG oslo_vmware.api [None req-c21d9b3d-fb6f-4fa2-b00b-917659867bb2 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1195.330147] env[62522]: value = "task-2416337" [ 1195.330147] env[62522]: _type = "Task" [ 1195.330147] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.343643] env[62522]: DEBUG oslo_vmware.api [None req-c21d9b3d-fb6f-4fa2-b00b-917659867bb2 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416337, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.536406] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416335, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469067} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.536875] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 08d7e318-ea68-4807-a300-ee4a7993647d/08d7e318-ea68-4807-a300-ee4a7993647d.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1195.537214] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1195.537454] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-337c42f8-6373-4e6f-8da3-4864d5eb3767 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.546327] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1195.546327] env[62522]: value = "task-2416338" [ 1195.546327] env[62522]: _type = "Task" [ 1195.546327] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.558742] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416338, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.629248] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416336, 'name': ReconfigVM_Task, 'duration_secs': 0.484342} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.629525] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Reconfigured VM instance instance-0000006a to attach disk [datastore2] a4cb5c19-9087-4354-9689-a99ae8924dc1/a4cb5c19-9087-4354-9689-a99ae8924dc1.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1195.630282] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3c4c03e5-d896-4aa8-94d5-f3df7662d7ae {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.635717] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1195.635717] env[62522]: value = "task-2416339" [ 1195.635717] env[62522]: _type = "Task" [ 1195.635717] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.647796] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416339, 'name': Rename_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.720670] env[62522]: DEBUG nova.objects.instance [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lazy-loading 'numa_topology' on Instance uuid 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1195.727924] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 917469c5-20be-4814-814f-a042415be021] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1195.842596] env[62522]: DEBUG oslo_vmware.api [None req-c21d9b3d-fb6f-4fa2-b00b-917659867bb2 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416337, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.038374] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7788d0a4-a55c-489f-b6f0-558d0acff81e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "24cf2f15-6f6a-4ded-b2fb-85093fddbf2b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1196.061537] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416338, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069587} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.061994] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1196.062893] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c03c8de-c6f1-4c5b-816a-695e1277e7e6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.087954] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 08d7e318-ea68-4807-a300-ee4a7993647d/08d7e318-ea68-4807-a300-ee4a7993647d.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1196.088461] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d4276b63-e39f-4f34-874d-36dd1732f78d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.113185] env[62522]: DEBUG nova.compute.manager [req-e91faf97-3695-4fee-817a-9a7aaf52f30b req-7f19b5ab-3ab9-491a-8cba-a64468b82bbf service nova] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Received event network-changed-d25e13a4-7bac-4701-afa0-5fdd63ad7f3c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1196.113686] env[62522]: DEBUG nova.compute.manager [req-e91faf97-3695-4fee-817a-9a7aaf52f30b req-7f19b5ab-3ab9-491a-8cba-a64468b82bbf service nova] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Refreshing instance network info cache due to event network-changed-d25e13a4-7bac-4701-afa0-5fdd63ad7f3c. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1196.114024] env[62522]: DEBUG oslo_concurrency.lockutils [req-e91faf97-3695-4fee-817a-9a7aaf52f30b req-7f19b5ab-3ab9-491a-8cba-a64468b82bbf service nova] Acquiring lock "refresh_cache-24cf2f15-6f6a-4ded-b2fb-85093fddbf2b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1196.114312] env[62522]: DEBUG oslo_concurrency.lockutils [req-e91faf97-3695-4fee-817a-9a7aaf52f30b req-7f19b5ab-3ab9-491a-8cba-a64468b82bbf service nova] Acquired lock "refresh_cache-24cf2f15-6f6a-4ded-b2fb-85093fddbf2b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.114605] env[62522]: DEBUG nova.network.neutron [req-e91faf97-3695-4fee-817a-9a7aaf52f30b req-7f19b5ab-3ab9-491a-8cba-a64468b82bbf service nova] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Refreshing network info cache for port d25e13a4-7bac-4701-afa0-5fdd63ad7f3c {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1196.120092] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "27f4b976-7dff-49b0-9b00-7515cb976e72" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1196.120092] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "27f4b976-7dff-49b0-9b00-7515cb976e72" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1196.126130] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1196.126130] env[62522]: value = "task-2416340" [ 1196.126130] env[62522]: _type = "Task" [ 1196.126130] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.139294] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416340, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.146579] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416339, 'name': Rename_Task, 'duration_secs': 0.1515} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.147000] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1196.147393] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f94deec3-4b93-4b4c-aa85-3246e6f13d9c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.153253] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1196.153253] env[62522]: value = "task-2416341" [ 1196.153253] env[62522]: _type = "Task" [ 1196.153253] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.161149] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416341, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.225974] env[62522]: DEBUG nova.objects.base [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Object Instance<24cf2f15-6f6a-4ded-b2fb-85093fddbf2b> lazy-loaded attributes: resources,numa_topology {{(pid=62522) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1196.232296] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: e60d5286-04dd-42bb-ae50-26b0a763d2bc] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1196.336838] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b8ba11f-e362-436e-a4da-ffcbcffcd59a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.347148] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b919c6e-1cc6-4215-b764-8d005a7639d6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.350651] env[62522]: DEBUG oslo_vmware.api [None req-c21d9b3d-fb6f-4fa2-b00b-917659867bb2 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416337, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.383651] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb1efe0-3742-41d1-92a7-59c1dfa38431 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.391913] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0568c13c-f391-4a3b-8465-bb3512540314 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.406963] env[62522]: DEBUG nova.compute.provider_tree [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1196.623251] env[62522]: DEBUG nova.compute.manager [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1196.635047] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416340, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.662741] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416341, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.733411] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 8539afc0-1753-4c37-9fc9-25ec97b97243] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1196.841211] env[62522]: DEBUG oslo_vmware.api [None req-c21d9b3d-fb6f-4fa2-b00b-917659867bb2 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416337, 'name': ReconfigVM_Task, 'duration_secs': 1.132245} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.841570] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-c21d9b3d-fb6f-4fa2-b00b-917659867bb2 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Reconfigured VM instance instance-00000064 to attach disk [datastore1] volume-894e4909-b283-41ec-bddf-7ed9bff284d1/volume-894e4909-b283-41ec-bddf-7ed9bff284d1.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1196.846132] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-311b71fe-0f89-4a28-8058-836d6aa17046 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.860569] env[62522]: DEBUG oslo_vmware.api [None req-c21d9b3d-fb6f-4fa2-b00b-917659867bb2 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1196.860569] env[62522]: value = "task-2416342" [ 1196.860569] env[62522]: _type = "Task" [ 1196.860569] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.871648] env[62522]: DEBUG oslo_vmware.api [None req-c21d9b3d-fb6f-4fa2-b00b-917659867bb2 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416342, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.910198] env[62522]: DEBUG nova.network.neutron [req-e91faf97-3695-4fee-817a-9a7aaf52f30b req-7f19b5ab-3ab9-491a-8cba-a64468b82bbf service nova] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Updated VIF entry in instance network info cache for port d25e13a4-7bac-4701-afa0-5fdd63ad7f3c. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1196.910573] env[62522]: DEBUG nova.network.neutron [req-e91faf97-3695-4fee-817a-9a7aaf52f30b req-7f19b5ab-3ab9-491a-8cba-a64468b82bbf service nova] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Updating instance_info_cache with network_info: [{"id": "d25e13a4-7bac-4701-afa0-5fdd63ad7f3c", "address": "fa:16:3e:02:12:f6", "network": {"id": "21d0ca22-5509-4f9b-b167-f9fde2dac808", "bridge": null, "label": "tempest-DeleteServersTestJSON-547933771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a06421250694a98b13ff34ad816dc75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapd25e13a4-7b", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.912260] env[62522]: DEBUG nova.scheduler.client.report [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1197.138381] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416340, 'name': ReconfigVM_Task, 'duration_secs': 0.593225} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.138657] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 08d7e318-ea68-4807-a300-ee4a7993647d/08d7e318-ea68-4807-a300-ee4a7993647d.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1197.139373] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c3b0374c-d57e-4c41-aa16-a52a621b85e5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.145809] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1197.145809] env[62522]: value = "task-2416343" [ 1197.145809] env[62522]: _type = "Task" [ 1197.145809] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.150897] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1197.160635] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416343, 'name': Rename_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.166016] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416341, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.236613] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 17ec01e7-9735-4771-a73c-c4c7634d59f1] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1197.373013] env[62522]: DEBUG oslo_vmware.api [None req-c21d9b3d-fb6f-4fa2-b00b-917659867bb2 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416342, 'name': ReconfigVM_Task, 'duration_secs': 0.250322} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.373457] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-c21d9b3d-fb6f-4fa2-b00b-917659867bb2 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489849', 'volume_id': '894e4909-b283-41ec-bddf-7ed9bff284d1', 'name': 'volume-894e4909-b283-41ec-bddf-7ed9bff284d1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f3894644-eb7e-4a6d-9029-4cd30466d6f8', 'attached_at': '', 'detached_at': '', 'volume_id': '894e4909-b283-41ec-bddf-7ed9bff284d1', 'serial': '894e4909-b283-41ec-bddf-7ed9bff284d1'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1197.417746] env[62522]: DEBUG oslo_concurrency.lockutils [req-e91faf97-3695-4fee-817a-9a7aaf52f30b req-7f19b5ab-3ab9-491a-8cba-a64468b82bbf service nova] Releasing lock "refresh_cache-24cf2f15-6f6a-4ded-b2fb-85093fddbf2b" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1197.418642] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.201s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1197.421230] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.270s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1197.422759] env[62522]: INFO nova.compute.claims [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1197.655613] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416343, 'name': Rename_Task, 'duration_secs': 0.372546} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.658451] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1197.658708] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8cfaa23b-2231-4828-a727-5a7e158aac41 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.664449] env[62522]: DEBUG oslo_vmware.api [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416341, 'name': PowerOnVM_Task, 'duration_secs': 1.197366} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.665577] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1197.665785] env[62522]: INFO nova.compute.manager [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Took 8.59 seconds to spawn the instance on the hypervisor. [ 1197.665967] env[62522]: DEBUG nova.compute.manager [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1197.666285] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1197.666285] env[62522]: value = "task-2416344" [ 1197.666285] env[62522]: _type = "Task" [ 1197.666285] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.666935] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-581c99c6-dc9f-413f-a494-e0f59f11cfcb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.681028] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416344, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.742022] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: fcd0eef6-d059-4495-a982-058b6c9626d1] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1197.932727] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70602422-214d-48cd-8650-b1fd38b32bc9 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "24cf2f15-6f6a-4ded-b2fb-85093fddbf2b" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 22.091s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1197.933670] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7788d0a4-a55c-489f-b6f0-558d0acff81e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "24cf2f15-6f6a-4ded-b2fb-85093fddbf2b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 1.896s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1197.934043] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7788d0a4-a55c-489f-b6f0-558d0acff81e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "24cf2f15-6f6a-4ded-b2fb-85093fddbf2b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1197.934124] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7788d0a4-a55c-489f-b6f0-558d0acff81e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "24cf2f15-6f6a-4ded-b2fb-85093fddbf2b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1197.934482] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7788d0a4-a55c-489f-b6f0-558d0acff81e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "24cf2f15-6f6a-4ded-b2fb-85093fddbf2b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1197.937430] env[62522]: INFO nova.compute.manager [None req-7788d0a4-a55c-489f-b6f0-558d0acff81e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Terminating instance [ 1198.124301] env[62522]: DEBUG nova.compute.manager [req-8b1ea06f-74ba-4281-9676-e5f866c19ab1 req-5374ac33-56d8-41c1-93a0-c1c3f3e4dd65 service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Received event network-changed-33665d0f-b7dd-4d62-86d5-8ccb8f178e97 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1198.124301] env[62522]: DEBUG nova.compute.manager [req-8b1ea06f-74ba-4281-9676-e5f866c19ab1 req-5374ac33-56d8-41c1-93a0-c1c3f3e4dd65 service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Refreshing instance network info cache due 
to event network-changed-33665d0f-b7dd-4d62-86d5-8ccb8f178e97. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1198.124495] env[62522]: DEBUG oslo_concurrency.lockutils [req-8b1ea06f-74ba-4281-9676-e5f866c19ab1 req-5374ac33-56d8-41c1-93a0-c1c3f3e4dd65 service nova] Acquiring lock "refresh_cache-cb7a19f1-6093-47ee-bbbc-a75dd5423f32" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1198.124731] env[62522]: DEBUG oslo_concurrency.lockutils [req-8b1ea06f-74ba-4281-9676-e5f866c19ab1 req-5374ac33-56d8-41c1-93a0-c1c3f3e4dd65 service nova] Acquired lock "refresh_cache-cb7a19f1-6093-47ee-bbbc-a75dd5423f32" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1198.124910] env[62522]: DEBUG nova.network.neutron [req-8b1ea06f-74ba-4281-9676-e5f866c19ab1 req-5374ac33-56d8-41c1-93a0-c1c3f3e4dd65 service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Refreshing network info cache for port 33665d0f-b7dd-4d62-86d5-8ccb8f178e97 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1198.182068] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416344, 'name': PowerOnVM_Task, 'duration_secs': 0.497154} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.182337] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1198.183086] env[62522]: DEBUG nova.compute.manager [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1198.184798] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e96c58eb-9aa7-4e90-bb62-d19204e95906 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.187526] env[62522]: INFO nova.compute.manager [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Took 17.68 seconds to build instance. 
[ 1198.245343] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 0bae8c37-20fe-4f9c-8b3d-5b4b59c4083c] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1198.410288] env[62522]: DEBUG nova.objects.instance [None req-c21d9b3d-fb6f-4fa2-b00b-917659867bb2 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lazy-loading 'flavor' on Instance uuid f3894644-eb7e-4a6d-9029-4cd30466d6f8 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1198.441878] env[62522]: DEBUG nova.compute.manager [None req-7788d0a4-a55c-489f-b6f0-558d0acff81e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1198.442176] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7788d0a4-a55c-489f-b6f0-558d0acff81e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1198.442736] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-76fd9d3e-f6bd-4407-9522-a59c8f30366e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.452666] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a435c57a-f6a9-4df8-b5de-1ab56526794b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.486179] env[62522]: WARNING nova.virt.vmwareapi.vmops [None req-7788d0a4-a55c-489f-b6f0-558d0acff81e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b could not be found. [ 1198.486419] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7788d0a4-a55c-489f-b6f0-558d0acff81e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1198.486616] env[62522]: INFO nova.compute.manager [None req-7788d0a4-a55c-489f-b6f0-558d0acff81e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1198.486865] env[62522]: DEBUG oslo.service.loopingcall [None req-7788d0a4-a55c-489f-b6f0-558d0acff81e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1198.487131] env[62522]: DEBUG nova.compute.manager [-] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1198.487227] env[62522]: DEBUG nova.network.neutron [-] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1198.542477] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4e395c-fd05-488a-a26f-dd6a6977eacd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.549559] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7920296d-6485-4162-b5e2-b10cec8fd3d6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.580583] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-610aa68f-c907-49ea-b27b-5721743b926e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.587381] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-258fd524-31f7-4bb3-9268-31944d04c9b8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.600438] env[62522]: DEBUG nova.compute.provider_tree [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1198.691187] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9401df30-ad4a-40c3-9f2b-cc1d5a57a159 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "a4cb5c19-9087-4354-9689-a99ae8924dc1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.196s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1198.695263] env[62522]: INFO nova.compute.manager [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] bringing vm to original state: 'stopped' [ 1198.749245] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 8b21b749-b872-43f7-a2c5-aefee6c5f3a1] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1198.916381] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c21d9b3d-fb6f-4fa2-b00b-917659867bb2 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.288s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1199.104070] env[62522]: 
DEBUG nova.scheduler.client.report [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1199.137790] env[62522]: DEBUG nova.network.neutron [req-8b1ea06f-74ba-4281-9676-e5f866c19ab1 req-5374ac33-56d8-41c1-93a0-c1c3f3e4dd65 service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Updated VIF entry in instance network info cache for port 33665d0f-b7dd-4d62-86d5-8ccb8f178e97. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1199.138186] env[62522]: DEBUG nova.network.neutron [req-8b1ea06f-74ba-4281-9676-e5f866c19ab1 req-5374ac33-56d8-41c1-93a0-c1c3f3e4dd65 service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Updating instance_info_cache with network_info: [{"id": "33665d0f-b7dd-4d62-86d5-8ccb8f178e97", "address": "fa:16:3e:1d:d3:51", "network": {"id": "949f3536-8a7e-4edf-b6cc-6a264fe5fe83", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1891232839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f93394feaa4f4b61a5d3d670d32ec599", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33665d0f-b7", "ovs_interfaceid": "33665d0f-b7dd-4d62-86d5-8ccb8f178e97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.252377] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 72e054d2-79bb-4ef8-82d1-4e67ba0ef20a] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1199.266752] env[62522]: DEBUG oslo_concurrency.lockutils [None req-aff65b76-a68a-4e70-b929-a785cf537071 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1199.267427] env[62522]: DEBUG oslo_concurrency.lockutils [None req-aff65b76-a68a-4e70-b929-a785cf537071 
tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.475509] env[62522]: DEBUG nova.network.neutron [-] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.609345] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.188s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1199.609860] env[62522]: DEBUG nova.compute.manager [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1199.640822] env[62522]: DEBUG oslo_concurrency.lockutils [req-8b1ea06f-74ba-4281-9676-e5f866c19ab1 req-5374ac33-56d8-41c1-93a0-c1c3f3e4dd65 service nova] Releasing lock "refresh_cache-cb7a19f1-6093-47ee-bbbc-a75dd5423f32" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1199.702734] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "08d7e318-ea68-4807-a300-ee4a7993647d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1199.703090] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "08d7e318-ea68-4807-a300-ee4a7993647d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.703324] env[62522]: DEBUG nova.compute.manager [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1199.704243] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15d8f952-5dce-467f-8abe-5f094f78562a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.711557] env[62522]: DEBUG nova.compute.manager [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Stopping instance; current vm_state: active, current task_state: 
powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62522) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1199.755612] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: ea9ea9d5-99f6-4b53-a7d8-eb2bdceb8c6a] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1199.769658] env[62522]: INFO nova.compute.manager [None req-aff65b76-a68a-4e70-b929-a785cf537071 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Detaching volume a0aa294f-c381-417e-981c-8709a38bb633 [ 1199.804771] env[62522]: INFO nova.virt.block_device [None req-aff65b76-a68a-4e70-b929-a785cf537071 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Attempting to driver detach volume a0aa294f-c381-417e-981c-8709a38bb633 from mountpoint /dev/sdb [ 1199.805035] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-aff65b76-a68a-4e70-b929-a785cf537071 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Volume detach. Driver type: vmdk {{(pid=62522) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1199.805227] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-aff65b76-a68a-4e70-b929-a785cf537071 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489847', 'volume_id': 'a0aa294f-c381-417e-981c-8709a38bb633', 'name': 'volume-a0aa294f-c381-417e-981c-8709a38bb633', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f3894644-eb7e-4a6d-9029-4cd30466d6f8', 'attached_at': '', 'detached_at': '', 'volume_id': 'a0aa294f-c381-417e-981c-8709a38bb633', 'serial': 'a0aa294f-c381-417e-981c-8709a38bb633'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1199.806094] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-656e9a3f-be41-4612-a84c-acebff6978a5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.830141] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-775050b0-fbed-4cb3-aea3-5805f85e7ac2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.836961] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5ee74d3-b86b-4f1f-a698-0e0e87a34690 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.860867] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6be970c-a9f1-45bc-bda7-151469bc2201 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.876184] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-aff65b76-a68a-4e70-b929-a785cf537071 
tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] The volume has not been displaced from its original location: [datastore1] volume-a0aa294f-c381-417e-981c-8709a38bb633/volume-a0aa294f-c381-417e-981c-8709a38bb633.vmdk. No consolidation needed. {{(pid=62522) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1199.881416] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-aff65b76-a68a-4e70-b929-a785cf537071 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Reconfiguring VM instance instance-00000064 to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1199.881713] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-39e3c73f-2fbf-4c26-8479-138dc0822d54 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.899626] env[62522]: DEBUG oslo_vmware.api [None req-aff65b76-a68a-4e70-b929-a785cf537071 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1199.899626] env[62522]: value = "task-2416345" [ 1199.899626] env[62522]: _type = "Task" [ 1199.899626] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.907317] env[62522]: DEBUG oslo_vmware.api [None req-aff65b76-a68a-4e70-b929-a785cf537071 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416345, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.978596] env[62522]: INFO nova.compute.manager [-] [instance: 24cf2f15-6f6a-4ded-b2fb-85093fddbf2b] Took 1.49 seconds to deallocate network for instance. [ 1200.115192] env[62522]: DEBUG nova.compute.utils [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1200.116624] env[62522]: DEBUG nova.compute.manager [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1200.116725] env[62522]: DEBUG nova.network.neutron [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1200.154645] env[62522]: DEBUG nova.compute.manager [req-c3d6ef0c-4033-4e7e-bfbd-eefb2915415a req-53eb4068-dac1-4d0e-81ca-cea9807ea4a6 service nova] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Received event network-changed-fb503ded-334f-4a04-b774-61284edf466f {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1200.154816] env[62522]: DEBUG nova.compute.manager [req-c3d6ef0c-4033-4e7e-bfbd-eefb2915415a req-53eb4068-dac1-4d0e-81ca-cea9807ea4a6 service nova] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Refreshing instance network info cache due to event network-changed-fb503ded-334f-4a04-b774-61284edf466f. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1200.154939] env[62522]: DEBUG oslo_concurrency.lockutils [req-c3d6ef0c-4033-4e7e-bfbd-eefb2915415a req-53eb4068-dac1-4d0e-81ca-cea9807ea4a6 service nova] Acquiring lock "refresh_cache-a4cb5c19-9087-4354-9689-a99ae8924dc1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1200.155084] env[62522]: DEBUG oslo_concurrency.lockutils [req-c3d6ef0c-4033-4e7e-bfbd-eefb2915415a req-53eb4068-dac1-4d0e-81ca-cea9807ea4a6 service nova] Acquired lock "refresh_cache-a4cb5c19-9087-4354-9689-a99ae8924dc1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.155265] env[62522]: DEBUG nova.network.neutron [req-c3d6ef0c-4033-4e7e-bfbd-eefb2915415a req-53eb4068-dac1-4d0e-81ca-cea9807ea4a6 service nova] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Refreshing network info cache for port fb503ded-334f-4a04-b774-61284edf466f {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1200.172304] env[62522]: DEBUG nova.policy [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '607183068c444260afbec94a63fde1d4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bab9d5d3c27d4c218b88e4a029300a66', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1200.215738] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1200.216054] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-34af5967-760d-4c52-95e2-5815dfa886c0 {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.224027] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1200.224027] env[62522]: value = "task-2416346" [ 1200.224027] env[62522]: _type = "Task" [ 1200.224027] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.232654] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416346, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.259373] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 04a9d357-d094-487b-8f09-2f7e0c35f0d7] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1200.408971] env[62522]: DEBUG oslo_vmware.api [None req-aff65b76-a68a-4e70-b929-a785cf537071 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416345, 'name': ReconfigVM_Task, 'duration_secs': 0.443929} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.409235] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-aff65b76-a68a-4e70-b929-a785cf537071 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Reconfigured VM instance instance-00000064 to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1200.413907] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2d7394a-ce91-4847-a562-d6fb81ac1dff {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.435311] env[62522]: DEBUG oslo_vmware.api [None req-aff65b76-a68a-4e70-b929-a785cf537071 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1200.435311] env[62522]: value = "task-2416347" [ 1200.435311] env[62522]: _type = "Task" [ 1200.435311] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.444840] env[62522]: DEBUG oslo_vmware.api [None req-aff65b76-a68a-4e70-b929-a785cf537071 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416347, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.531152] env[62522]: DEBUG nova.network.neutron [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Successfully created port: cf4b3978-2fa2-4182-9422-abf29faafcf6 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1200.619982] env[62522]: DEBUG nova.compute.manager [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1200.734102] env[62522]: DEBUG oslo_vmware.api [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416346, 'name': PowerOffVM_Task, 'duration_secs': 0.253071} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.734394] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1200.734612] env[62522]: DEBUG nova.compute.manager [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1200.735366] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bcaa948-5e21-475e-a650-20b49405eca7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.762891] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 043a0a1b-268c-4caa-b1f7-cc7d70c3b314] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1200.945394] env[62522]: DEBUG oslo_vmware.api [None req-aff65b76-a68a-4e70-b929-a785cf537071 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416347, 'name': ReconfigVM_Task, 'duration_secs': 0.182794} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.945766] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-aff65b76-a68a-4e70-b929-a785cf537071 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489847', 'volume_id': 'a0aa294f-c381-417e-981c-8709a38bb633', 'name': 'volume-a0aa294f-c381-417e-981c-8709a38bb633', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f3894644-eb7e-4a6d-9029-4cd30466d6f8', 'attached_at': '', 'detached_at': '', 'volume_id': 'a0aa294f-c381-417e-981c-8709a38bb633', 'serial': 'a0aa294f-c381-417e-981c-8709a38bb633'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1201.008821] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7788d0a4-a55c-489f-b6f0-558d0acff81e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "24cf2f15-6f6a-4ded-b2fb-85093fddbf2b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.075s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.173251] env[62522]: DEBUG nova.network.neutron [req-c3d6ef0c-4033-4e7e-bfbd-eefb2915415a req-53eb4068-dac1-4d0e-81ca-cea9807ea4a6 service nova] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Updated VIF entry in instance network info cache for port fb503ded-334f-4a04-b774-61284edf466f. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1201.173251] env[62522]: DEBUG nova.network.neutron [req-c3d6ef0c-4033-4e7e-bfbd-eefb2915415a req-53eb4068-dac1-4d0e-81ca-cea9807ea4a6 service nova] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Updating instance_info_cache with network_info: [{"id": "fb503ded-334f-4a04-b774-61284edf466f", "address": "fa:16:3e:0d:45:12", "network": {"id": "2c9c537f-91b6-4217-8eaf-dc187f4ce7d5", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1154766161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fa792663b4ac41b7bf4c5e4b290f9b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb503ded-33", "ovs_interfaceid": "fb503ded-334f-4a04-b774-61284edf466f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.249187] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f 
tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "08d7e318-ea68-4807-a300-ee4a7993647d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.546s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.267263] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 845f99b8-4a9d-4fbe-89e1-825a5ddd01f2] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1201.486510] env[62522]: DEBUG nova.objects.instance [None req-aff65b76-a68a-4e70-b929-a785cf537071 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lazy-loading 'flavor' on Instance uuid f3894644-eb7e-4a6d-9029-4cd30466d6f8 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1201.630980] env[62522]: DEBUG nova.compute.manager [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1201.656787] env[62522]: DEBUG nova.virt.hardware [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1201.657041] env[62522]: DEBUG nova.virt.hardware [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1201.657205] env[62522]: DEBUG nova.virt.hardware [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1201.657418] env[62522]: DEBUG nova.virt.hardware [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1201.657529] env[62522]: DEBUG nova.virt.hardware [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] 
Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1201.657675] env[62522]: DEBUG nova.virt.hardware [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1201.657902] env[62522]: DEBUG nova.virt.hardware [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1201.658096] env[62522]: DEBUG nova.virt.hardware [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1201.658282] env[62522]: DEBUG nova.virt.hardware [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1201.658475] env[62522]: DEBUG nova.virt.hardware [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1201.658757] env[62522]: DEBUG nova.virt.hardware [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1201.659687] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b402d25d-8f42-4a9b-ac80-539555f383d8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.667576] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d61957f-8e6c-45b2-a230-685bf3c70a13 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.680731] env[62522]: DEBUG oslo_concurrency.lockutils [req-c3d6ef0c-4033-4e7e-bfbd-eefb2915415a req-53eb4068-dac1-4d0e-81ca-cea9807ea4a6 service nova] Releasing lock "refresh_cache-a4cb5c19-9087-4354-9689-a99ae8924dc1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1201.756793] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.757062] env[62522]: 
DEBUG oslo_concurrency.lockutils [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.757246] env[62522]: DEBUG nova.objects.instance [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62522) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1201.769029] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 35e6b9af-d0ef-419f-be9f-e6d4bbb12bc6] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1202.125523] env[62522]: DEBUG oslo_concurrency.lockutils [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "bbb8ba81-9fed-419c-b2f9-ac5baaac3b88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1202.125798] env[62522]: DEBUG oslo_concurrency.lockutils [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "bbb8ba81-9fed-419c-b2f9-ac5baaac3b88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1202.271557] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: c28d2907-5b59-4df8-91a8-4ba0f2047d89] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1202.348280] env[62522]: DEBUG nova.compute.manager [req-f31da9b1-095d-436c-9d3e-ba15ba9392d6 req-8ff2b908-be14-4f63-87a6-2dca66935d70 service nova] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Received event network-vif-plugged-cf4b3978-2fa2-4182-9422-abf29faafcf6 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1202.348529] env[62522]: DEBUG oslo_concurrency.lockutils [req-f31da9b1-095d-436c-9d3e-ba15ba9392d6 req-8ff2b908-be14-4f63-87a6-2dca66935d70 service nova] Acquiring lock "27f4b976-7dff-49b0-9b00-7515cb976e72-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1202.348736] env[62522]: DEBUG oslo_concurrency.lockutils [req-f31da9b1-095d-436c-9d3e-ba15ba9392d6 req-8ff2b908-be14-4f63-87a6-2dca66935d70 service nova] Lock "27f4b976-7dff-49b0-9b00-7515cb976e72-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1202.348900] env[62522]: DEBUG 
oslo_concurrency.lockutils [req-f31da9b1-095d-436c-9d3e-ba15ba9392d6 req-8ff2b908-be14-4f63-87a6-2dca66935d70 service nova] Lock "27f4b976-7dff-49b0-9b00-7515cb976e72-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.349077] env[62522]: DEBUG nova.compute.manager [req-f31da9b1-095d-436c-9d3e-ba15ba9392d6 req-8ff2b908-be14-4f63-87a6-2dca66935d70 service nova] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] No waiting events found dispatching network-vif-plugged-cf4b3978-2fa2-4182-9422-abf29faafcf6 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1202.349243] env[62522]: WARNING nova.compute.manager [req-f31da9b1-095d-436c-9d3e-ba15ba9392d6 req-8ff2b908-be14-4f63-87a6-2dca66935d70 service nova] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Received unexpected event network-vif-plugged-cf4b3978-2fa2-4182-9422-abf29faafcf6 for instance with vm_state building and task_state spawning. [ 1202.435593] env[62522]: DEBUG oslo_concurrency.lockutils [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "08d7e318-ea68-4807-a300-ee4a7993647d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1202.435948] env[62522]: DEBUG oslo_concurrency.lockutils [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "08d7e318-ea68-4807-a300-ee4a7993647d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1202.436296] env[62522]: DEBUG oslo_concurrency.lockutils [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "08d7e318-ea68-4807-a300-ee4a7993647d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1202.436507] env[62522]: DEBUG oslo_concurrency.lockutils [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "08d7e318-ea68-4807-a300-ee4a7993647d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1202.436747] env[62522]: DEBUG oslo_concurrency.lockutils [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "08d7e318-ea68-4807-a300-ee4a7993647d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.439600] env[62522]: DEBUG nova.network.neutron [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 
tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Successfully updated port: cf4b3978-2fa2-4182-9422-abf29faafcf6 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1202.441534] env[62522]: INFO nova.compute.manager [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Terminating instance [ 1202.492734] env[62522]: DEBUG oslo_concurrency.lockutils [None req-aff65b76-a68a-4e70-b929-a785cf537071 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.226s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.536700] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7ab9198f-4736-4e63-93e0-f15774b96630 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1202.536979] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7ab9198f-4736-4e63-93e0-f15774b96630 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1202.628661] env[62522]: DEBUG nova.compute.manager [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1202.765536] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5be42ce8-d0e8-4b61-b63b-cfa4403dff7f tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.774347] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 9141ffdd-cbfa-4efe-a01b-dc1326af474c] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1202.944036] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1202.944304] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1202.944358] env[62522]: DEBUG nova.network.neutron [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1202.946651] env[62522]: DEBUG nova.compute.manager [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1202.946651] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1202.947684] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c703bb-e0c6-45d0-b9dd-b8bfa43f1e44 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.956331] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1202.956562] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-900d7dd4-1535-4391-b16e-e5d48cb7f23e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.030070] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1203.030305] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1203.030724] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Deleting the datastore file [datastore1] 08d7e318-ea68-4807-a300-ee4a7993647d {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1203.030724] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4545999-bbc9-452d-9b8d-618db8e1414e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.037078] env[62522]: DEBUG oslo_vmware.api [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1203.037078] env[62522]: value = "task-2416349" [ 1203.037078] env[62522]: _type = "Task" [ 1203.037078] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.040903] env[62522]: INFO nova.compute.manager [None req-7ab9198f-4736-4e63-93e0-f15774b96630 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Detaching volume 894e4909-b283-41ec-bddf-7ed9bff284d1 [ 1203.047609] env[62522]: DEBUG oslo_vmware.api [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416349, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.079204] env[62522]: INFO nova.virt.block_device [None req-7ab9198f-4736-4e63-93e0-f15774b96630 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Attempting to driver detach volume 894e4909-b283-41ec-bddf-7ed9bff284d1 from mountpoint /dev/sdc [ 1203.079446] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ab9198f-4736-4e63-93e0-f15774b96630 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Volume detach. Driver type: vmdk {{(pid=62522) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1203.079633] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ab9198f-4736-4e63-93e0-f15774b96630 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489849', 'volume_id': '894e4909-b283-41ec-bddf-7ed9bff284d1', 'name': 'volume-894e4909-b283-41ec-bddf-7ed9bff284d1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f3894644-eb7e-4a6d-9029-4cd30466d6f8', 'attached_at': '', 'detached_at': '', 'volume_id': '894e4909-b283-41ec-bddf-7ed9bff284d1', 'serial': '894e4909-b283-41ec-bddf-7ed9bff284d1'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1203.080550] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd728092-8430-4f4c-bb52-81d1f6df47cf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.104016] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11ed6618-3dcd-473a-93e8-5b747ad5c289 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.111258] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef1bb3dd-bad3-4389-b56d-693481dc0c18 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.130990] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f794780d-17d7-4dbe-ad55-8471897d86d1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.148129] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None 
req-7ab9198f-4736-4e63-93e0-f15774b96630 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] The volume has not been displaced from its original location: [datastore1] volume-894e4909-b283-41ec-bddf-7ed9bff284d1/volume-894e4909-b283-41ec-bddf-7ed9bff284d1.vmdk. No consolidation needed. {{(pid=62522) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1203.153372] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ab9198f-4736-4e63-93e0-f15774b96630 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Reconfiguring VM instance instance-00000064 to detach disk 2002 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1203.154516] env[62522]: DEBUG oslo_concurrency.lockutils [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.154755] env[62522]: DEBUG oslo_concurrency.lockutils [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.156197] env[62522]: INFO nova.compute.claims [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1203.158437] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b1859331-a436-4576-ace3-41a008c2a40c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.176765] env[62522]: DEBUG oslo_vmware.api [None req-7ab9198f-4736-4e63-93e0-f15774b96630 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1203.176765] env[62522]: value = "task-2416350" [ 1203.176765] env[62522]: _type = "Task" [ 1203.176765] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.184236] env[62522]: DEBUG oslo_vmware.api [None req-7ab9198f-4736-4e63-93e0-f15774b96630 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416350, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.277027] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: ff6637e9-2a67-4302-9769-24ec045538d4] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1203.475989] env[62522]: DEBUG nova.network.neutron [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1203.546937] env[62522]: DEBUG oslo_vmware.api [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416349, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.20317} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.547234] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1203.547426] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1203.547606] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1203.547779] env[62522]: INFO nova.compute.manager [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1203.548036] env[62522]: DEBUG oslo.service.loopingcall [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1203.548238] env[62522]: DEBUG nova.compute.manager [-] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1203.548335] env[62522]: DEBUG nova.network.neutron [-] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1203.687343] env[62522]: DEBUG oslo_vmware.api [None req-7ab9198f-4736-4e63-93e0-f15774b96630 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416350, 'name': ReconfigVM_Task, 'duration_secs': 0.448531} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.687617] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ab9198f-4736-4e63-93e0-f15774b96630 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Reconfigured VM instance instance-00000064 to detach disk 2002 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1203.693186] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e725281-6ca5-493e-beb1-70c5bcf5d24c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.707944] env[62522]: DEBUG oslo_vmware.api [None req-7ab9198f-4736-4e63-93e0-f15774b96630 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1203.707944] env[62522]: value = "task-2416351" [ 1203.707944] env[62522]: _type = "Task" [ 1203.707944] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.715926] env[62522]: DEBUG oslo_vmware.api [None req-7ab9198f-4736-4e63-93e0-f15774b96630 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416351, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.728322] env[62522]: DEBUG nova.network.neutron [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Updating instance_info_cache with network_info: [{"id": "cf4b3978-2fa2-4182-9422-abf29faafcf6", "address": "fa:16:3e:74:26:e7", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf4b3978-2f", "ovs_interfaceid": "cf4b3978-2fa2-4182-9422-abf29faafcf6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.780561] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 6ef27aee-719c-4089-825d-fc117e867bde] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1204.217037] env[62522]: DEBUG oslo_vmware.api [None req-7ab9198f-4736-4e63-93e0-f15774b96630 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416351, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.233525] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Releasing lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1204.233865] env[62522]: DEBUG nova.compute.manager [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Instance network_info: |[{"id": "cf4b3978-2fa2-4182-9422-abf29faafcf6", "address": "fa:16:3e:74:26:e7", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf4b3978-2f", "ovs_interfaceid": "cf4b3978-2fa2-4182-9422-abf29faafcf6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1204.234525] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:26:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f625f389-b7cf-49b9-998a-87f3a9e3f234', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf4b3978-2fa2-4182-9422-abf29faafcf6', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1204.241970] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Creating folder: Project (bab9d5d3c27d4c218b88e4a029300a66). Parent ref: group-v489562. 
{{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1204.242253] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-88a73c32-4532-4aef-a776-e105e86814f2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.255662] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Created folder: Project (bab9d5d3c27d4c218b88e4a029300a66) in parent group-v489562. [ 1204.255828] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Creating folder: Instances. Parent ref: group-v489854. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1204.256084] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9768bab1-8155-4cb4-86ab-65e45de02f5c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.264985] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Created folder: Instances in parent group-v489854. [ 1204.265245] env[62522]: DEBUG oslo.service.loopingcall [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1204.267710] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1204.268139] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0441ddbb-0bcf-4a82-af3a-4ac9a2c88cc3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.284576] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 74e52638-d284-4bd1-8cff-c7aca9426f75] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1204.292871] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1204.292871] env[62522]: value = "task-2416354" [ 1204.292871] env[62522]: _type = "Task" [ 1204.292871] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.302894] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416354, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.312191] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3575e7b-d7a6-480f-a049-d14449507171 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.319326] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f5916bb-69b3-425e-820f-40a5ade03505 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.351185] env[62522]: DEBUG nova.network.neutron [-] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1204.353669] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd6dd22-280d-45a8-a833-c2477e8d1720 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.363807] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684c438a-f676-4f71-b7d6-7f0c38fdf1d9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.378728] env[62522]: DEBUG nova.compute.provider_tree [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1204.382951] env[62522]: DEBUG nova.compute.manager [req-d24b132a-6f13-4935-9678-99c4bd5b3ab2 req-6086938c-dfdc-48f7-b8e1-698947bfe819 service nova] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Received event network-changed-cf4b3978-2fa2-4182-9422-abf29faafcf6 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1204.382951] env[62522]: DEBUG nova.compute.manager [req-d24b132a-6f13-4935-9678-99c4bd5b3ab2 req-6086938c-dfdc-48f7-b8e1-698947bfe819 service nova] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Refreshing instance network info cache due to event network-changed-cf4b3978-2fa2-4182-9422-abf29faafcf6. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1204.383088] env[62522]: DEBUG oslo_concurrency.lockutils [req-d24b132a-6f13-4935-9678-99c4bd5b3ab2 req-6086938c-dfdc-48f7-b8e1-698947bfe819 service nova] Acquiring lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1204.383237] env[62522]: DEBUG oslo_concurrency.lockutils [req-d24b132a-6f13-4935-9678-99c4bd5b3ab2 req-6086938c-dfdc-48f7-b8e1-698947bfe819 service nova] Acquired lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.383429] env[62522]: DEBUG nova.network.neutron [req-d24b132a-6f13-4935-9678-99c4bd5b3ab2 req-6086938c-dfdc-48f7-b8e1-698947bfe819 service nova] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Refreshing network info cache for port cf4b3978-2fa2-4182-9422-abf29faafcf6 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1204.717840] env[62522]: DEBUG oslo_vmware.api [None req-7ab9198f-4736-4e63-93e0-f15774b96630 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416351, 'name': ReconfigVM_Task, 'duration_secs': 0.963907} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.718171] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ab9198f-4736-4e63-93e0-f15774b96630 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489849', 'volume_id': '894e4909-b283-41ec-bddf-7ed9bff284d1', 'name': 'volume-894e4909-b283-41ec-bddf-7ed9bff284d1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f3894644-eb7e-4a6d-9029-4cd30466d6f8', 'attached_at': '', 'detached_at': '', 'volume_id': '894e4909-b283-41ec-bddf-7ed9bff284d1', 'serial': '894e4909-b283-41ec-bddf-7ed9bff284d1'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1204.787881] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: a10c4dee-4490-445a-bea2-9f8ef5425d15] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1204.801798] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416354, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.858824] env[62522]: INFO nova.compute.manager [-] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Took 1.31 seconds to deallocate network for instance. 
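Editor's note on the task entries above: the repeated "_poll_task ... progress is N%" lines (for example task-2416354, which reports 0% and 99% before its final "completed successfully" entry) reflect a fixed-interval polling loop — the driver submits a vCenter task and then re-reads its state until it succeeds, errors, or times out. The sketch below is a minimal, self-contained illustration of that pattern only; fetch_task_state, TaskTimeout, and the example states are hypothetical stand-ins and are not oslo.vmware's actual API or implementation.

    import time

    class TaskTimeout(Exception):
        """Raised when a task does not finish in time (hypothetical name)."""

    def wait_for_task(fetch_task_state, interval=0.5, timeout=120.0):
        """Poll a task at a fixed interval until it completes.

        fetch_task_state is a hypothetical callable returning a dict such as
        {'state': 'running', 'progress': 42}, {'state': 'success'}, or
        {'state': 'error', 'message': '...'}; it stands in for reading the
        remote task object that the log entries above poll via the driver.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_state()
            state = info.get('state')
            if state == 'success':
                return info                      # task completed successfully
            if state == 'error':
                raise RuntimeError(info.get('message', 'task failed'))
            # Still queued/running: report progress and sleep before the next
            # poll, mirroring the "progress is N%" debug lines in the log.
            print("task progress is %s%%" % info.get('progress', 0))
            time.sleep(interval)
        raise TaskTimeout("task did not complete within %.1fs" % timeout)

    if __name__ == "__main__":
        # Hypothetical example run: two in-progress polls, then success,
        # analogous to the 0% / 99% / completed sequence seen above.
        states = iter([
            {'state': 'running', 'progress': 0},
            {'state': 'running', 'progress': 99},
            {'state': 'success'},
        ])
        wait_for_task(lambda: next(states), interval=0.01)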
[ 1204.881772] env[62522]: DEBUG nova.scheduler.client.report [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1205.079106] env[62522]: DEBUG nova.network.neutron [req-d24b132a-6f13-4935-9678-99c4bd5b3ab2 req-6086938c-dfdc-48f7-b8e1-698947bfe819 service nova] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Updated VIF entry in instance network info cache for port cf4b3978-2fa2-4182-9422-abf29faafcf6. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1205.079495] env[62522]: DEBUG nova.network.neutron [req-d24b132a-6f13-4935-9678-99c4bd5b3ab2 req-6086938c-dfdc-48f7-b8e1-698947bfe819 service nova] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Updating instance_info_cache with network_info: [{"id": "cf4b3978-2fa2-4182-9422-abf29faafcf6", "address": "fa:16:3e:74:26:e7", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf4b3978-2f", "ovs_interfaceid": "cf4b3978-2fa2-4182-9422-abf29faafcf6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1205.257513] env[62522]: DEBUG nova.objects.instance [None req-7ab9198f-4736-4e63-93e0-f15774b96630 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lazy-loading 'flavor' on Instance uuid f3894644-eb7e-4a6d-9029-4cd30466d6f8 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1205.291011] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: bf44e269-0297-473e-b6ce-04a40d0ec1b4] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1205.303302] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416354, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.365962] env[62522]: DEBUG oslo_concurrency.lockutils [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1205.388199] env[62522]: DEBUG oslo_concurrency.lockutils [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.233s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.388701] env[62522]: DEBUG nova.compute.manager [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1205.391093] env[62522]: DEBUG oslo_concurrency.lockutils [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.025s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1205.391341] env[62522]: DEBUG nova.objects.instance [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lazy-loading 'resources' on Instance uuid 08d7e318-ea68-4807-a300-ee4a7993647d {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1205.581903] env[62522]: DEBUG oslo_concurrency.lockutils [req-d24b132a-6f13-4935-9678-99c4bd5b3ab2 req-6086938c-dfdc-48f7-b8e1-698947bfe819 service nova] Releasing lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1205.582248] env[62522]: DEBUG nova.compute.manager [req-d24b132a-6f13-4935-9678-99c4bd5b3ab2 req-6086938c-dfdc-48f7-b8e1-698947bfe819 service nova] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Received event network-vif-deleted-2eb2d7ef-0c29-4fda-947c-b02485470817 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1205.794051] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 3c4c395c-0625-4569-990d-e2d4ad162c14] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1205.805428] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416354, 'name': CreateVM_Task, 'duration_secs': 1.426092} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.805600] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1205.806294] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1205.806483] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.806774] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1205.807040] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66e15f1c-d897-44ae-84e9-5952a5cf7bff {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.811513] env[62522]: DEBUG oslo_vmware.api [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1205.811513] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52abfbc5-0140-8d1b-671f-1fa6f4c4e1ce" [ 1205.811513] env[62522]: _type = "Task" [ 1205.811513] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.818878] env[62522]: DEBUG oslo_vmware.api [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52abfbc5-0140-8d1b-671f-1fa6f4c4e1ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.894631] env[62522]: DEBUG nova.compute.utils [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1205.898882] env[62522]: DEBUG nova.compute.manager [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1205.898882] env[62522]: DEBUG nova.network.neutron [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1205.938094] env[62522]: DEBUG nova.policy [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f26eeb125397426baca60d80d635c4b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a06421250694a98b13ff34ad816dc75', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1206.004226] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a1ba6a-04f9-49d0-97e0-1b327b9d0750 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.011793] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1d87783-4551-497d-9211-96e47d2790d2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.042213] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-631ef873-214b-4c10-8a82-41d2a2ac4a16 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.048940] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fea57f11-ed88-43ed-8565-e688fded7955 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.061851] env[62522]: DEBUG nova.compute.provider_tree [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1206.175687] env[62522]: DEBUG nova.network.neutron [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Successfully created port: dd7e0f4f-dfde-45d0-9cfc-335366201143 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1206.264175] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7ab9198f-4736-4e63-93e0-f15774b96630 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.727s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.300592] env[62522]: DEBUG nova.compute.manager [None 
req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: ebca687d-4de7-4fd6-99fb-b4f0154abe9c] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1206.322393] env[62522]: DEBUG oslo_vmware.api [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52abfbc5-0140-8d1b-671f-1fa6f4c4e1ce, 'name': SearchDatastore_Task, 'duration_secs': 0.04594} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.322698] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1206.322927] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1206.323175] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1206.323560] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1206.323560] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1206.324319] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-28f9a532-67db-4292-9faf-43da1913e40b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.332822] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1206.333008] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Folder 
[datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1206.333947] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f77ef37-3bcd-4742-89d1-850d2a2cb707 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.339465] env[62522]: DEBUG oslo_vmware.api [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1206.339465] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a3d23a-2b04-72a7-7346-1b88732ec503" [ 1206.339465] env[62522]: _type = "Task" [ 1206.339465] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.347010] env[62522]: DEBUG oslo_vmware.api [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a3d23a-2b04-72a7-7346-1b88732ec503, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.399568] env[62522]: DEBUG nova.compute.manager [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1206.565372] env[62522]: DEBUG nova.scheduler.client.report [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1206.803912] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: e813e7da-fd2c-4f10-b2f3-1e2b5c153a19] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1206.849956] env[62522]: DEBUG oslo_vmware.api [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a3d23a-2b04-72a7-7346-1b88732ec503, 'name': SearchDatastore_Task, 'duration_secs': 0.008661} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.850358] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec43a722-b973-4215-a5a1-f6bb18f6dec6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.856321] env[62522]: DEBUG oslo_vmware.api [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1206.856321] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524d5811-a1e1-c919-f288-c3cf31856632" [ 1206.856321] env[62522]: _type = "Task" [ 1206.856321] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.864470] env[62522]: DEBUG oslo_vmware.api [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524d5811-a1e1-c919-f288-c3cf31856632, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.933248] env[62522]: DEBUG oslo_concurrency.lockutils [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.933532] env[62522]: DEBUG oslo_concurrency.lockutils [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1206.933743] env[62522]: DEBUG oslo_concurrency.lockutils [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.933926] env[62522]: DEBUG oslo_concurrency.lockutils [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1206.934106] env[62522]: DEBUG oslo_concurrency.lockutils [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.936359] env[62522]: INFO nova.compute.manager [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Terminating instance [ 1207.072377] env[62522]: DEBUG oslo_concurrency.lockutils [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.681s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1207.098085] env[62522]: INFO nova.scheduler.client.report [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Deleted allocations for instance 08d7e318-ea68-4807-a300-ee4a7993647d [ 1207.307465] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: c1fd078c-61d4-4c0f-8c49-0f56a926a087] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1207.366842] env[62522]: DEBUG oslo_vmware.api [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524d5811-a1e1-c919-f288-c3cf31856632, 'name': SearchDatastore_Task, 'duration_secs': 0.009487} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.367117] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1207.367380] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 27f4b976-7dff-49b0-9b00-7515cb976e72/27f4b976-7dff-49b0-9b00-7515cb976e72.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1207.367693] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a975007-e62c-44aa-a118-e5a606790024 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.375027] env[62522]: DEBUG oslo_vmware.api [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1207.375027] env[62522]: value = "task-2416355" [ 1207.375027] env[62522]: _type = "Task" [ 1207.375027] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.382795] env[62522]: DEBUG oslo_vmware.api [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416355, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.408675] env[62522]: DEBUG nova.compute.manager [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1207.436157] env[62522]: DEBUG nova.virt.hardware [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1207.436450] env[62522]: DEBUG nova.virt.hardware [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1207.437033] env[62522]: DEBUG nova.virt.hardware [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1207.437033] env[62522]: DEBUG nova.virt.hardware [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1207.437033] env[62522]: DEBUG nova.virt.hardware [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1207.437215] env[62522]: DEBUG nova.virt.hardware [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1207.437386] env[62522]: DEBUG 
nova.virt.hardware [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1207.437489] env[62522]: DEBUG nova.virt.hardware [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1207.437704] env[62522]: DEBUG nova.virt.hardware [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1207.437857] env[62522]: DEBUG nova.virt.hardware [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1207.438129] env[62522]: DEBUG nova.virt.hardware [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1207.439099] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c564160b-c2be-41b5-af2b-5598953e710d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.442346] env[62522]: DEBUG nova.compute.manager [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1207.442346] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1207.443642] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-028d551c-2149-4ad6-a75f-fb2a37166b76 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.452759] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-458538b4-52ed-4ae9-a706-efc4a2ca822b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.456598] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1207.456832] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9df174bc-e994-4ef0-9af6-cc61cbd40d33 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.469844] env[62522]: DEBUG oslo_vmware.api [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1207.469844] env[62522]: value = "task-2416356" [ 1207.469844] env[62522]: _type = "Task" [ 1207.469844] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.477855] env[62522]: DEBUG oslo_vmware.api [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416356, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.608244] env[62522]: DEBUG oslo_concurrency.lockutils [None req-61d1385d-424d-4036-833e-8df1ef7c6dda tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "08d7e318-ea68-4807-a300-ee4a7993647d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.171s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1207.760350] env[62522]: DEBUG nova.compute.manager [req-291f7e71-dce7-46b0-9274-dab6c1120c63 req-5969dbab-7c8d-4147-9ac7-d34b082a663c service nova] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Received event network-vif-plugged-dd7e0f4f-dfde-45d0-9cfc-335366201143 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1207.760682] env[62522]: DEBUG oslo_concurrency.lockutils [req-291f7e71-dce7-46b0-9274-dab6c1120c63 req-5969dbab-7c8d-4147-9ac7-d34b082a663c service nova] Acquiring lock "bbb8ba81-9fed-419c-b2f9-ac5baaac3b88-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1207.762435] env[62522]: DEBUG oslo_concurrency.lockutils [req-291f7e71-dce7-46b0-9274-dab6c1120c63 req-5969dbab-7c8d-4147-9ac7-d34b082a663c service nova] Lock "bbb8ba81-9fed-419c-b2f9-ac5baaac3b88-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1207.762591] env[62522]: DEBUG oslo_concurrency.lockutils [req-291f7e71-dce7-46b0-9274-dab6c1120c63 req-5969dbab-7c8d-4147-9ac7-d34b082a663c service nova] Lock "bbb8ba81-9fed-419c-b2f9-ac5baaac3b88-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1207.762831] env[62522]: DEBUG nova.compute.manager [req-291f7e71-dce7-46b0-9274-dab6c1120c63 req-5969dbab-7c8d-4147-9ac7-d34b082a663c service nova] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] No waiting events found dispatching network-vif-plugged-dd7e0f4f-dfde-45d0-9cfc-335366201143 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1207.763116] env[62522]: WARNING nova.compute.manager [req-291f7e71-dce7-46b0-9274-dab6c1120c63 req-5969dbab-7c8d-4147-9ac7-d34b082a663c service nova] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Received unexpected event network-vif-plugged-dd7e0f4f-dfde-45d0-9cfc-335366201143 for instance with vm_state building and task_state spawning. 
[ 1207.810477] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: cd69a052-369b-4809-baf0-a1aec44f4ab5] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1207.881556] env[62522]: DEBUG nova.network.neutron [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Successfully updated port: dd7e0f4f-dfde-45d0-9cfc-335366201143 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1207.886237] env[62522]: DEBUG oslo_vmware.api [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416355, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474855} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.886511] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 27f4b976-7dff-49b0-9b00-7515cb976e72/27f4b976-7dff-49b0-9b00-7515cb976e72.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1207.886749] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1207.887014] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bcc81a8d-3258-4f57-9c98-88f8b91f47ac {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.894877] env[62522]: DEBUG oslo_vmware.api [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1207.894877] env[62522]: value = "task-2416357" [ 1207.894877] env[62522]: _type = "Task" [ 1207.894877] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.904985] env[62522]: DEBUG oslo_vmware.api [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416357, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.981024] env[62522]: DEBUG oslo_vmware.api [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416356, 'name': PowerOffVM_Task, 'duration_secs': 0.225389} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.981024] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1207.981024] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1207.981024] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b9c66fd5-9502-40cd-8346-4c9b64e5cf9d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.048514] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1208.048741] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1208.048922] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Deleting the datastore file [datastore2] f3894644-eb7e-4a6d-9029-4cd30466d6f8 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1208.049210] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3dd2a91-4748-42ce-8198-b3c490e34830 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.055821] env[62522]: DEBUG oslo_vmware.api [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for the task: (returnval){ [ 1208.055821] env[62522]: value = "task-2416359" [ 1208.055821] env[62522]: _type = "Task" [ 1208.055821] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.063604] env[62522]: DEBUG oslo_vmware.api [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416359, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.314059] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: c181ce48-9fe2-4400-9047-f8b5a7159dd3] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1208.384198] env[62522]: DEBUG oslo_concurrency.lockutils [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "refresh_cache-bbb8ba81-9fed-419c-b2f9-ac5baaac3b88" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1208.384379] env[62522]: DEBUG oslo_concurrency.lockutils [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired lock "refresh_cache-bbb8ba81-9fed-419c-b2f9-ac5baaac3b88" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1208.384600] env[62522]: DEBUG nova.network.neutron [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1208.404954] env[62522]: DEBUG oslo_vmware.api [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416357, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061548} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.405251] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1208.405975] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41edad01-3302-4515-9bdd-08ec0331c98e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.432350] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 27f4b976-7dff-49b0-9b00-7515cb976e72/27f4b976-7dff-49b0-9b00-7515cb976e72.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1208.432655] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5f5636e-3cb2-4cf3-a191-abd73c379706 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.453071] env[62522]: DEBUG oslo_vmware.api [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1208.453071] env[62522]: value = "task-2416360" [ 1208.453071] env[62522]: _type = "Task" [ 1208.453071] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.460777] env[62522]: DEBUG oslo_vmware.api [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416360, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.565322] env[62522]: DEBUG oslo_vmware.api [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Task: {'id': task-2416359, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.420686} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.565793] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1208.565793] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1208.565998] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1208.566110] env[62522]: INFO nova.compute.manager [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1208.566354] env[62522]: DEBUG oslo.service.loopingcall [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1208.566555] env[62522]: DEBUG nova.compute.manager [-] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1208.566644] env[62522]: DEBUG nova.network.neutron [-] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1208.817374] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 879354d3-7423-41e2-93f6-0d8d3a120170] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1208.964224] env[62522]: DEBUG oslo_vmware.api [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416360, 'name': ReconfigVM_Task, 'duration_secs': 0.269847} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.966372] env[62522]: DEBUG nova.network.neutron [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1208.968485] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 27f4b976-7dff-49b0-9b00-7515cb976e72/27f4b976-7dff-49b0-9b00-7515cb976e72.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1208.969136] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0f8bdb93-97d0-4687-a172-74faf367f536 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.977154] env[62522]: DEBUG oslo_vmware.api [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1208.977154] env[62522]: value = "task-2416361" [ 1208.977154] env[62522]: _type = "Task" [ 1208.977154] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.992624] env[62522]: DEBUG oslo_vmware.api [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416361, 'name': Rename_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.299083] env[62522]: DEBUG nova.network.neutron [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Updating instance_info_cache with network_info: [{"id": "dd7e0f4f-dfde-45d0-9cfc-335366201143", "address": "fa:16:3e:44:57:86", "network": {"id": "21d0ca22-5509-4f9b-b167-f9fde2dac808", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-547933771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a06421250694a98b13ff34ad816dc75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd7e0f4f-df", "ovs_interfaceid": "dd7e0f4f-dfde-45d0-9cfc-335366201143", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1209.320872] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62522) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.320872] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Cleaning up deleted instances with incomplete migration {{(pid=62522) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11783}} [ 1209.487827] env[62522]: DEBUG oslo_vmware.api [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416361, 'name': Rename_Task, 'duration_secs': 0.138556} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.488461] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1209.488761] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b935ed0a-0639-42da-bc81-994ccae1c3c8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.494989] env[62522]: DEBUG oslo_vmware.api [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1209.494989] env[62522]: value = "task-2416362" [ 1209.494989] env[62522]: _type = "Task" [ 1209.494989] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.503280] env[62522]: DEBUG oslo_vmware.api [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416362, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.754762] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "7406a1a4-a342-475b-ad02-6a29f7c487ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1209.755028] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "7406a1a4-a342-475b-ad02-6a29f7c487ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1209.794378] env[62522]: DEBUG nova.compute.manager [req-14019e8a-a8d9-4f62-b433-8a790060dcb0 req-a508dc66-d2f9-49d9-81b5-0e46ced09e88 service nova] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Received event network-changed-dd7e0f4f-dfde-45d0-9cfc-335366201143 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1209.795062] env[62522]: DEBUG nova.compute.manager [req-14019e8a-a8d9-4f62-b433-8a790060dcb0 req-a508dc66-d2f9-49d9-81b5-0e46ced09e88 service nova] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Refreshing instance network info cache due to event network-changed-dd7e0f4f-dfde-45d0-9cfc-335366201143. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1209.795281] env[62522]: DEBUG oslo_concurrency.lockutils [req-14019e8a-a8d9-4f62-b433-8a790060dcb0 req-a508dc66-d2f9-49d9-81b5-0e46ced09e88 service nova] Acquiring lock "refresh_cache-bbb8ba81-9fed-419c-b2f9-ac5baaac3b88" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1209.801555] env[62522]: DEBUG oslo_concurrency.lockutils [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Releasing lock "refresh_cache-bbb8ba81-9fed-419c-b2f9-ac5baaac3b88" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1209.801876] env[62522]: DEBUG nova.compute.manager [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Instance network_info: |[{"id": "dd7e0f4f-dfde-45d0-9cfc-335366201143", "address": "fa:16:3e:44:57:86", "network": {"id": "21d0ca22-5509-4f9b-b167-f9fde2dac808", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-547933771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a06421250694a98b13ff34ad816dc75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd7e0f4f-df", "ovs_interfaceid": "dd7e0f4f-dfde-45d0-9cfc-335366201143", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1209.802180] env[62522]: DEBUG oslo_concurrency.lockutils [req-14019e8a-a8d9-4f62-b433-8a790060dcb0 req-a508dc66-d2f9-49d9-81b5-0e46ced09e88 service nova] Acquired lock "refresh_cache-bbb8ba81-9fed-419c-b2f9-ac5baaac3b88" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1209.802384] env[62522]: DEBUG nova.network.neutron [req-14019e8a-a8d9-4f62-b433-8a790060dcb0 req-a508dc66-d2f9-49d9-81b5-0e46ced09e88 service nova] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Refreshing network info cache for port dd7e0f4f-dfde-45d0-9cfc-335366201143 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1209.803761] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:57:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24144f5a-050a-4f1e-8d8c-774dc16dc791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dd7e0f4f-dfde-45d0-9cfc-335366201143', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1209.811465] env[62522]: DEBUG oslo.service.loopingcall [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1209.814596] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1209.815059] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1affa0aa-73ec-46e2-bd71-1275099243d5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.836876] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1209.836876] env[62522]: value = "task-2416363" [ 1209.836876] env[62522]: _type = "Task" [ 1209.836876] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.845369] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416363, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.877722] env[62522]: DEBUG nova.network.neutron [-] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.005235] env[62522]: DEBUG oslo_vmware.api [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416362, 'name': PowerOnVM_Task, 'duration_secs': 0.5059} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.005651] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1210.005797] env[62522]: INFO nova.compute.manager [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Took 8.37 seconds to spawn the instance on the hypervisor. [ 1210.005929] env[62522]: DEBUG nova.compute.manager [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1210.006771] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d201b64-742b-4050-8e13-55fd00698d16 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.072466] env[62522]: DEBUG nova.network.neutron [req-14019e8a-a8d9-4f62-b433-8a790060dcb0 req-a508dc66-d2f9-49d9-81b5-0e46ced09e88 service nova] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Updated VIF entry in instance network info cache for port dd7e0f4f-dfde-45d0-9cfc-335366201143. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1210.072875] env[62522]: DEBUG nova.network.neutron [req-14019e8a-a8d9-4f62-b433-8a790060dcb0 req-a508dc66-d2f9-49d9-81b5-0e46ced09e88 service nova] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Updating instance_info_cache with network_info: [{"id": "dd7e0f4f-dfde-45d0-9cfc-335366201143", "address": "fa:16:3e:44:57:86", "network": {"id": "21d0ca22-5509-4f9b-b167-f9fde2dac808", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-547933771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a06421250694a98b13ff34ad816dc75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd7e0f4f-df", "ovs_interfaceid": "dd7e0f4f-dfde-45d0-9cfc-335366201143", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.257120] env[62522]: DEBUG nova.compute.manager [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1210.348470] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416363, 'name': CreateVM_Task, 'duration_secs': 0.339768} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.348691] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1210.349375] env[62522]: DEBUG oslo_concurrency.lockutils [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1210.349588] env[62522]: DEBUG oslo_concurrency.lockutils [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1210.349940] env[62522]: DEBUG oslo_concurrency.lockutils [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1210.350215] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-063fb681-cc5a-436e-b2ea-d8afcf7c43c5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.354419] env[62522]: DEBUG oslo_vmware.api [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1210.354419] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522d1cd8-29be-395f-393b-84dbe6b62d07" [ 1210.354419] env[62522]: _type = "Task" [ 1210.354419] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.361807] env[62522]: DEBUG oslo_vmware.api [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522d1cd8-29be-395f-393b-84dbe6b62d07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.379365] env[62522]: INFO nova.compute.manager [-] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Took 1.81 seconds to deallocate network for instance. [ 1210.525311] env[62522]: INFO nova.compute.manager [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Took 13.39 seconds to build instance. 
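The records above trace the cycle that recurs throughout this log: a vSphere task is started (Folder.CreateVM_Task, then HostDatastoreBrowser.SearchDatastore_Task against the devstack image cache) and oslo.vmware polls it until it reports success, emitting the "progress is N%" and "completed successfully" lines. The following is a minimal sketch of that poll-until-complete pattern for readers of the log; poll_task_state and its canned progress values are invented for illustration and are not the oslo.vmware implementation.

    import time

    # Hypothetical stand-in for querying a vSphere task's "info" property;
    # the real driver asks the PropertyCollector whether the task is
    # queued/running/success/error. Canned values only, for the demo below.
    def poll_task_state(task_id, _progress={"task-2416363": iter([0, 40, 100])}):
        pct = next(_progress.get(task_id, iter(())), 100)
        return ("success", 100) if pct >= 100 else ("running", pct)

    def wait_for_task(task_id, interval=0.5, timeout=30.0):
        """Poll a task until it completes, mirroring the DEBUG lines
        'Task: {...} progress is N%' followed by 'completed successfully.'"""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress = poll_task_state(task_id)
            print(f"Task {task_id}: {state}, progress {progress}%")
            if state == "success":
                return
            if state == "error":
                raise RuntimeError(f"Task {task_id} failed")
            time.sleep(interval)
        raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")

    if __name__ == "__main__":
        wait_for_task("task-2416363")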
[ 1210.576527] env[62522]: DEBUG oslo_concurrency.lockutils [req-14019e8a-a8d9-4f62-b433-8a790060dcb0 req-a508dc66-d2f9-49d9-81b5-0e46ced09e88 service nova] Releasing lock "refresh_cache-bbb8ba81-9fed-419c-b2f9-ac5baaac3b88" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1210.576791] env[62522]: DEBUG nova.compute.manager [req-14019e8a-a8d9-4f62-b433-8a790060dcb0 req-a508dc66-d2f9-49d9-81b5-0e46ced09e88 service nova] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Received event network-vif-deleted-2d7b03e9-5319-496c-b990-7663aa7aa371 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1210.576969] env[62522]: INFO nova.compute.manager [req-14019e8a-a8d9-4f62-b433-8a790060dcb0 req-a508dc66-d2f9-49d9-81b5-0e46ced09e88 service nova] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Neutron deleted interface 2d7b03e9-5319-496c-b990-7663aa7aa371; detaching it from the instance and deleting it from the info cache [ 1210.577156] env[62522]: DEBUG nova.network.neutron [req-14019e8a-a8d9-4f62-b433-8a790060dcb0 req-a508dc66-d2f9-49d9-81b5-0e46ced09e88 service nova] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.780570] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1210.780873] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1210.782572] env[62522]: INFO nova.compute.claims [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1210.864887] env[62522]: DEBUG oslo_vmware.api [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522d1cd8-29be-395f-393b-84dbe6b62d07, 'name': SearchDatastore_Task, 'duration_secs': 0.010851} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.865242] env[62522]: DEBUG oslo_concurrency.lockutils [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1210.865438] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1210.865668] env[62522]: DEBUG oslo_concurrency.lockutils [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1210.865813] env[62522]: DEBUG oslo_concurrency.lockutils [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1210.865991] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1210.866265] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d15fbf9d-61cc-47ef-a14a-0e763dabb6e3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.874929] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1210.875109] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1210.875819] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-feaf9789-e8e2-4a9b-b434-ce8050ca5c92 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.880730] env[62522]: DEBUG oslo_vmware.api [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1210.880730] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527815f8-7ac0-fe49-2138-c42355162bbc" [ 1210.880730] env[62522]: _type = "Task" [ 1210.880730] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.889441] env[62522]: DEBUG oslo_concurrency.lockutils [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1210.889704] env[62522]: DEBUG oslo_vmware.api [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527815f8-7ac0-fe49-2138-c42355162bbc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.027041] env[62522]: DEBUG oslo_concurrency.lockutils [None req-cd9237b5-ee8c-4136-b744-14e826c4474b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "27f4b976-7dff-49b0-9b00-7515cb976e72" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.908s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1211.079770] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b627391a-23f7-4111-88d4-934a32aecb24 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.089610] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8d00290-c3a4-43b6-80a7-c1cf92e4fd9c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.118473] env[62522]: DEBUG nova.compute.manager [req-14019e8a-a8d9-4f62-b433-8a790060dcb0 req-a508dc66-d2f9-49d9-81b5-0e46ced09e88 service nova] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Detach interface failed, port_id=2d7b03e9-5319-496c-b990-7663aa7aa371, reason: Instance f3894644-eb7e-4a6d-9029-4cd30466d6f8 could not be found. 
{{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1211.120896] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eed40bb7-c3db-4459-82fc-2e43b83066c3 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "1c6451e0-2fae-4d2b-86d7-86f9537a6259" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1211.121134] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eed40bb7-c3db-4459-82fc-2e43b83066c3 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "1c6451e0-2fae-4d2b-86d7-86f9537a6259" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1211.392051] env[62522]: DEBUG oslo_vmware.api [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527815f8-7ac0-fe49-2138-c42355162bbc, 'name': SearchDatastore_Task, 'duration_secs': 0.009016} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.392498] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4020f4ca-2466-4df1-9a36-72d955a73c3c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.397504] env[62522]: DEBUG oslo_vmware.api [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1211.397504] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5285cde4-aa6b-7480-5661-8bdedf880abe" [ 1211.397504] env[62522]: _type = "Task" [ 1211.397504] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.404665] env[62522]: DEBUG oslo_vmware.api [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5285cde4-aa6b-7480-5661-8bdedf880abe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.624137] env[62522]: DEBUG nova.compute.utils [None req-eed40bb7-c3db-4459-82fc-2e43b83066c3 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1211.817901] env[62522]: DEBUG nova.compute.manager [req-a36e938c-656b-4384-a19b-9f7f0da1e575 req-0e61b81f-9bcc-4d11-bddc-444b3d635dfb service nova] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Received event network-changed-cf4b3978-2fa2-4182-9422-abf29faafcf6 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1211.818128] env[62522]: DEBUG nova.compute.manager [req-a36e938c-656b-4384-a19b-9f7f0da1e575 req-0e61b81f-9bcc-4d11-bddc-444b3d635dfb service nova] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Refreshing instance network info cache due to event network-changed-cf4b3978-2fa2-4182-9422-abf29faafcf6. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1211.818348] env[62522]: DEBUG oslo_concurrency.lockutils [req-a36e938c-656b-4384-a19b-9f7f0da1e575 req-0e61b81f-9bcc-4d11-bddc-444b3d635dfb service nova] Acquiring lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1211.818492] env[62522]: DEBUG oslo_concurrency.lockutils [req-a36e938c-656b-4384-a19b-9f7f0da1e575 req-0e61b81f-9bcc-4d11-bddc-444b3d635dfb service nova] Acquired lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1211.818701] env[62522]: DEBUG nova.network.neutron [req-a36e938c-656b-4384-a19b-9f7f0da1e575 req-0e61b81f-9bcc-4d11-bddc-444b3d635dfb service nova] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Refreshing network info cache for port cf4b3978-2fa2-4182-9422-abf29faafcf6 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1211.826099] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1211.893614] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e58ee88e-1fbf-4f56-9a92-44808e9de548 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.903498] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f7938c9-b995-4c09-ba4e-56316147c8b8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.911767] env[62522]: DEBUG oslo_vmware.api [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5285cde4-aa6b-7480-5661-8bdedf880abe, 'name': SearchDatastore_Task, 'duration_secs': 0.008943} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.934632] env[62522]: DEBUG oslo_concurrency.lockutils [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1211.934940] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] bbb8ba81-9fed-419c-b2f9-ac5baaac3b88/bbb8ba81-9fed-419c-b2f9-ac5baaac3b88.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1211.935359] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ceb019f-edec-4003-99a7-8621adc96485 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.937958] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a79ab5ca-b0d3-4e45-b735-e5f7523fe0bc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.946194] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ce2124-7dc4-46c9-bad1-30ea94d2425b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.949684] env[62522]: DEBUG oslo_vmware.api [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1211.949684] env[62522]: value = "task-2416364" [ 1211.949684] env[62522]: _type = "Task" [ 1211.949684] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.961385] env[62522]: DEBUG nova.compute.provider_tree [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1211.966676] env[62522]: DEBUG oslo_vmware.api [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416364, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.128605] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eed40bb7-c3db-4459-82fc-2e43b83066c3 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "1c6451e0-2fae-4d2b-86d7-86f9537a6259" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1212.331157] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1212.459258] env[62522]: DEBUG oslo_vmware.api [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416364, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.448662} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.459532] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] bbb8ba81-9fed-419c-b2f9-ac5baaac3b88/bbb8ba81-9fed-419c-b2f9-ac5baaac3b88.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1212.459769] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1212.460044] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-347020bc-a29a-4ff6-a52d-1f7704572888 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.464848] env[62522]: DEBUG nova.scheduler.client.report [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1212.471148] env[62522]: DEBUG oslo_vmware.api [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1212.471148] env[62522]: value = "task-2416365" [ 1212.471148] env[62522]: _type = "Task" [ 1212.471148] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.481074] env[62522]: DEBUG oslo_vmware.api [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416365, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.606143] env[62522]: DEBUG nova.network.neutron [req-a36e938c-656b-4384-a19b-9f7f0da1e575 req-0e61b81f-9bcc-4d11-bddc-444b3d635dfb service nova] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Updated VIF entry in instance network info cache for port cf4b3978-2fa2-4182-9422-abf29faafcf6. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1212.606599] env[62522]: DEBUG nova.network.neutron [req-a36e938c-656b-4384-a19b-9f7f0da1e575 req-0e61b81f-9bcc-4d11-bddc-444b3d635dfb service nova] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Updating instance_info_cache with network_info: [{"id": "cf4b3978-2fa2-4182-9422-abf29faafcf6", "address": "fa:16:3e:74:26:e7", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf4b3978-2f", "ovs_interfaceid": "cf4b3978-2fa2-4182-9422-abf29faafcf6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1212.836025] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Getting list of instances from cluster (obj){ [ 1212.836025] env[62522]: value = "domain-c8" [ 1212.836025] env[62522]: _type = "ClusterComputeResource" [ 1212.836025] env[62522]: } {{(pid=62522) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1212.837389] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b8a21f-9a9f-4e12-872f-fe61406e9fd6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.853630] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Got total of 6 instances {{(pid=62522) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1212.853787] env[62522]: WARNING nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] While synchronizing instance power states, found 8 instances in the database and 6 instances on the hypervisor. 
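Most of the "Acquiring lock ... / Lock ... acquired ... waited 0.000s / Lock ... released ... held N.NNNs" triplets in this section come from oslo.concurrency's lockutils wrappers around per-instance and per-resource critical sections (for example the "compute_resources" lock taken during claims, and the per-UUID locks taken by _sync_power_states above). A short sketch of that usage pattern follows; the guarded functions are invented for illustration, and only the lock names are taken from the log.

    from oslo_concurrency import lockutils

    # Decorator form: serialize work on one instance UUID, the way the compute
    # manager serializes power-state sync against build/delete for that instance.
    @lockutils.synchronized("bbb8ba81-9fed-419c-b2f9-ac5baaac3b88")
    def sync_one_instance():
        # ... query the driver's power state and reconcile it with the database ...
        return "synced"

    # Context-manager form: the "compute_resources" lock held while the resource
    # tracker records a claim, so concurrent claims cannot interleave.
    def claim_resources():
        with lockutils.lock("compute_resources"):
            pass  # ... update tracked usage here ...

    if __name__ == "__main__":
        sync_one_instance()
        claim_resources()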
[ 1212.853917] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Triggering sync for uuid b31195c2-29f4-475c-baa7-fcb4791b7278 {{(pid=62522) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1212.854123] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Triggering sync for uuid f3894644-eb7e-4a6d-9029-4cd30466d6f8 {{(pid=62522) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1212.854283] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Triggering sync for uuid 1c6451e0-2fae-4d2b-86d7-86f9537a6259 {{(pid=62522) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1212.854447] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Triggering sync for uuid cb7a19f1-6093-47ee-bbbc-a75dd5423f32 {{(pid=62522) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1212.854613] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Triggering sync for uuid a4cb5c19-9087-4354-9689-a99ae8924dc1 {{(pid=62522) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1212.854771] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Triggering sync for uuid 27f4b976-7dff-49b0-9b00-7515cb976e72 {{(pid=62522) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1212.854920] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Triggering sync for uuid bbb8ba81-9fed-419c-b2f9-ac5baaac3b88 {{(pid=62522) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1212.855080] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Triggering sync for uuid 7406a1a4-a342-475b-ad02-6a29f7c487ee {{(pid=62522) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1212.855396] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "b31195c2-29f4-475c-baa7-fcb4791b7278" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.855629] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "b31195c2-29f4-475c-baa7-fcb4791b7278" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1212.855912] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.856133] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "1c6451e0-2fae-4d2b-86d7-86f9537a6259" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.856310] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "1c6451e0-2fae-4d2b-86d7-86f9537a6259" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1212.856537] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.856717] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1212.856931] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "a4cb5c19-9087-4354-9689-a99ae8924dc1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.857116] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "a4cb5c19-9087-4354-9689-a99ae8924dc1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1212.857338] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "27f4b976-7dff-49b0-9b00-7515cb976e72" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.857508] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "27f4b976-7dff-49b0-9b00-7515cb976e72" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1212.857729] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "bbb8ba81-9fed-419c-b2f9-ac5baaac3b88" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.857924] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "7406a1a4-a342-475b-ad02-6a29f7c487ee" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.858745] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfbb59f2-b0fd-42d7-8e4b-989398d032a5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.861709] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-441d6daa-0562-4994-a7a2-9458710402f2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.864333] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c08ee76f-5e1d-43b0-b083-45c905dc7183 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.867044] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e93384d-0f05-4a52-b8cd-dd2f52f87a8f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.869605] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a60e2b9-f64b-49a8-aeb1-346e816c8271 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.972661] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.192s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1212.973148] env[62522]: DEBUG nova.compute.manager [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1212.976228] env[62522]: DEBUG oslo_concurrency.lockutils [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.087s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1212.976416] env[62522]: DEBUG nova.objects.instance [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lazy-loading 'resources' on Instance uuid f3894644-eb7e-4a6d-9029-4cd30466d6f8 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1212.986896] env[62522]: DEBUG oslo_vmware.api [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416365, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059232} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.986896] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1212.987167] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe943192-93c7-4087-8d61-4b143296318b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.010597] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] bbb8ba81-9fed-419c-b2f9-ac5baaac3b88/bbb8ba81-9fed-419c-b2f9-ac5baaac3b88.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1213.011624] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68fb1250-4ad6-4ecc-91c0-41c616935bf2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.033388] env[62522]: DEBUG oslo_vmware.api [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1213.033388] env[62522]: value = "task-2416366" [ 1213.033388] env[62522]: _type = "Task" [ 1213.033388] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.041508] env[62522]: DEBUG oslo_vmware.api [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416366, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.109302] env[62522]: DEBUG oslo_concurrency.lockutils [req-a36e938c-656b-4384-a19b-9f7f0da1e575 req-0e61b81f-9bcc-4d11-bddc-444b3d635dfb service nova] Releasing lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1213.208837] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eed40bb7-c3db-4459-82fc-2e43b83066c3 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "1c6451e0-2fae-4d2b-86d7-86f9537a6259" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1213.388704] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.532s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1213.389116] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "b31195c2-29f4-475c-baa7-fcb4791b7278" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.533s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1213.389434] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "a4cb5c19-9087-4354-9689-a99ae8924dc1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.532s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1213.396030] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "27f4b976-7dff-49b0-9b00-7515cb976e72" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.538s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1213.396398] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "1c6451e0-2fae-4d2b-86d7-86f9537a6259" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.540s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1213.396628] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eed40bb7-c3db-4459-82fc-2e43b83066c3 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "1c6451e0-2fae-4d2b-86d7-86f9537a6259" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.188s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1213.396869] env[62522]: INFO nova.compute.manager [None req-eed40bb7-c3db-4459-82fc-2e43b83066c3 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] 
Attaching volume 790e2a55-e79e-4d14-9cf8-bed0cf3d0293 to /dev/sdb [ 1213.427409] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-828341a6-f4f3-4a4d-911e-f1fc7db5dbe1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.434776] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d0ae49-22a4-486b-8bfb-c6e204e6734f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.448529] env[62522]: DEBUG nova.virt.block_device [None req-eed40bb7-c3db-4459-82fc-2e43b83066c3 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Updating existing volume attachment record: 875d85ab-c4f1-4a7a-9dba-a89d27e8805f {{(pid=62522) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1213.481941] env[62522]: DEBUG nova.compute.utils [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1213.485974] env[62522]: DEBUG nova.compute.manager [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1213.486342] env[62522]: DEBUG nova.network.neutron [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1213.545871] env[62522]: DEBUG oslo_vmware.api [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416366, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.557642] env[62522]: DEBUG nova.policy [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3a4ba3a3d3a34495b7a7e0618577d60f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '071dd4c295a54e388099d5bf0f4e300b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1213.602843] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39788cd9-2648-4a12-b5d1-f1f167ebd3aa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.611658] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c25845eb-e70d-471e-bad6-c6f0d46c93f7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.644262] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fbf3be0-3d07-4c9c-815f-113fde9baa83 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.651523] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-862fc001-c780-46d4-bfd6-64f27abb6aa7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.665179] env[62522]: DEBUG nova.compute.provider_tree [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1213.987187] env[62522]: DEBUG nova.compute.manager [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1214.043989] env[62522]: DEBUG oslo_vmware.api [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416366, 'name': ReconfigVM_Task, 'duration_secs': 0.813075} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.044247] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Reconfigured VM instance instance-0000006c to attach disk [datastore1] bbb8ba81-9fed-419c-b2f9-ac5baaac3b88/bbb8ba81-9fed-419c-b2f9-ac5baaac3b88.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1214.044903] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d2f24bf5-8680-434b-9d51-671a2ab93d6f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.050583] env[62522]: DEBUG oslo_vmware.api [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1214.050583] env[62522]: value = "task-2416370" [ 1214.050583] env[62522]: _type = "Task" [ 1214.050583] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.057962] env[62522]: DEBUG oslo_vmware.api [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416370, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.133765] env[62522]: DEBUG nova.network.neutron [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Successfully created port: 1f2dff01-fe9d-46ea-af42-ec9d20c5ac2e {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1214.171020] env[62522]: DEBUG nova.scheduler.client.report [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1214.563219] env[62522]: DEBUG oslo_vmware.api [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416370, 'name': Rename_Task, 'duration_secs': 0.383684} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.563219] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1214.563219] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-025c918b-fce4-4d09-b784-db1e96099a27 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.569509] env[62522]: DEBUG oslo_vmware.api [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1214.569509] env[62522]: value = "task-2416371" [ 1214.569509] env[62522]: _type = "Task" [ 1214.569509] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.576979] env[62522]: DEBUG oslo_vmware.api [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416371, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.672996] env[62522]: DEBUG oslo_concurrency.lockutils [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.697s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1214.697190] env[62522]: INFO nova.scheduler.client.report [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Deleted allocations for instance f3894644-eb7e-4a6d-9029-4cd30466d6f8 [ 1214.997527] env[62522]: DEBUG nova.compute.manager [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1215.026111] env[62522]: DEBUG nova.virt.hardware [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1215.026424] env[62522]: DEBUG nova.virt.hardware [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1215.026617] env[62522]: DEBUG nova.virt.hardware [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1215.026947] env[62522]: DEBUG nova.virt.hardware [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1215.027219] env[62522]: DEBUG nova.virt.hardware [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1215.027471] env[62522]: DEBUG nova.virt.hardware [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1215.027794] env[62522]: DEBUG nova.virt.hardware [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1215.028233] env[62522]: DEBUG nova.virt.hardware [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1215.028351] env[62522]: DEBUG nova.virt.hardware [None 
req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1215.028522] env[62522]: DEBUG nova.virt.hardware [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1215.028703] env[62522]: DEBUG nova.virt.hardware [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1215.029649] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d455243-c282-449e-bb9e-1f940edcf4f9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.039581] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf6c4bc-c1bd-40ff-a53c-68b558b2c9df {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.078512] env[62522]: DEBUG oslo_vmware.api [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416371, 'name': PowerOnVM_Task, 'duration_secs': 0.492813} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.078761] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1215.078964] env[62522]: INFO nova.compute.manager [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Took 7.67 seconds to spawn the instance on the hypervisor. 
[ 1215.079160] env[62522]: DEBUG nova.compute.manager [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1215.079910] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ef1bda-a2ae-4eb0-ba8c-0d610845bbda {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.204849] env[62522]: DEBUG oslo_concurrency.lockutils [None req-67ef8971-eaf0-423b-8c1f-f0be2ed6ed65 tempest-AttachVolumeTestJSON-1462924921 tempest-AttachVolumeTestJSON-1462924921-project-member] Lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.271s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1215.205714] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 2.350s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1215.206070] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4a408bd8-736e-4836-b571-32f462e804aa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.215884] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d455d66-0d22-41d2-b0bf-b94add7d66b6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.548725] env[62522]: DEBUG nova.compute.manager [req-7395cdc9-380b-4b00-b178-564a8ce76283 req-7d3e3f62-8abe-4066-93ea-9ec52fb95926 service nova] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Received event network-vif-plugged-1f2dff01-fe9d-46ea-af42-ec9d20c5ac2e {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1215.549074] env[62522]: DEBUG oslo_concurrency.lockutils [req-7395cdc9-380b-4b00-b178-564a8ce76283 req-7d3e3f62-8abe-4066-93ea-9ec52fb95926 service nova] Acquiring lock "7406a1a4-a342-475b-ad02-6a29f7c487ee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1215.549621] env[62522]: DEBUG oslo_concurrency.lockutils [req-7395cdc9-380b-4b00-b178-564a8ce76283 req-7d3e3f62-8abe-4066-93ea-9ec52fb95926 service nova] Lock "7406a1a4-a342-475b-ad02-6a29f7c487ee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1215.549734] env[62522]: DEBUG oslo_concurrency.lockutils [req-7395cdc9-380b-4b00-b178-564a8ce76283 req-7d3e3f62-8abe-4066-93ea-9ec52fb95926 service nova] Lock "7406a1a4-a342-475b-ad02-6a29f7c487ee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1215.549986] env[62522]: DEBUG nova.compute.manager [req-7395cdc9-380b-4b00-b178-564a8ce76283 req-7d3e3f62-8abe-4066-93ea-9ec52fb95926 service nova] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] No waiting events found dispatching network-vif-plugged-1f2dff01-fe9d-46ea-af42-ec9d20c5ac2e {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1215.550333] env[62522]: WARNING nova.compute.manager [req-7395cdc9-380b-4b00-b178-564a8ce76283 req-7d3e3f62-8abe-4066-93ea-9ec52fb95926 service nova] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Received unexpected event network-vif-plugged-1f2dff01-fe9d-46ea-af42-ec9d20c5ac2e for instance with vm_state building and task_state spawning. [ 1215.595064] env[62522]: INFO nova.compute.manager [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Took 12.46 seconds to build instance. [ 1215.690471] env[62522]: DEBUG nova.network.neutron [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Successfully updated port: 1f2dff01-fe9d-46ea-af42-ec9d20c5ac2e {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1215.746862] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "f3894644-eb7e-4a6d-9029-4cd30466d6f8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.541s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1216.097815] env[62522]: DEBUG oslo_concurrency.lockutils [None req-51c1c823-b210-49fb-95ca-af99f999f5ba tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "bbb8ba81-9fed-419c-b2f9-ac5baaac3b88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.972s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1216.098222] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "bbb8ba81-9fed-419c-b2f9-ac5baaac3b88" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 3.240s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1216.098413] env[62522]: INFO nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 1216.098581] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "bbb8ba81-9fed-419c-b2f9-ac5baaac3b88" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1216.192890] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "refresh_cache-7406a1a4-a342-475b-ad02-6a29f7c487ee" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1216.193041] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired lock "refresh_cache-7406a1a4-a342-475b-ad02-6a29f7c487ee" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1216.193204] env[62522]: DEBUG nova.network.neutron [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1216.618050] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2214c07d-abce-49bf-85e2-0899385f86e6 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "bbb8ba81-9fed-419c-b2f9-ac5baaac3b88" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1216.618278] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2214c07d-abce-49bf-85e2-0899385f86e6 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "bbb8ba81-9fed-419c-b2f9-ac5baaac3b88" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1216.618466] env[62522]: DEBUG nova.compute.manager [None req-2214c07d-abce-49bf-85e2-0899385f86e6 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1216.619408] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c0916e-8b47-4423-afbd-3b7f271ac74b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.626493] env[62522]: DEBUG nova.compute.manager [None req-2214c07d-abce-49bf-85e2-0899385f86e6 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62522) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1216.627059] env[62522]: DEBUG nova.objects.instance [None 
req-2214c07d-abce-49bf-85e2-0899385f86e6 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lazy-loading 'flavor' on Instance uuid bbb8ba81-9fed-419c-b2f9-ac5baaac3b88 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1216.735867] env[62522]: DEBUG nova.network.neutron [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1216.893658] env[62522]: DEBUG nova.network.neutron [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Updating instance_info_cache with network_info: [{"id": "1f2dff01-fe9d-46ea-af42-ec9d20c5ac2e", "address": "fa:16:3e:08:27:40", "network": {"id": "fd993a8b-571c-4873-a5d6-4d8c60e23d38", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2084093882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071dd4c295a54e388099d5bf0f4e300b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f2dff01-fe", "ovs_interfaceid": "1f2dff01-fe9d-46ea-af42-ec9d20c5ac2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1217.396442] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Releasing lock "refresh_cache-7406a1a4-a342-475b-ad02-6a29f7c487ee" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1217.396819] env[62522]: DEBUG nova.compute.manager [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Instance network_info: |[{"id": "1f2dff01-fe9d-46ea-af42-ec9d20c5ac2e", "address": "fa:16:3e:08:27:40", "network": {"id": "fd993a8b-571c-4873-a5d6-4d8c60e23d38", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2084093882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071dd4c295a54e388099d5bf0f4e300b", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f2dff01-fe", "ovs_interfaceid": "1f2dff01-fe9d-46ea-af42-ec9d20c5ac2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1217.397228] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:27:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd7b5f1ef-d4b9-4ec3-b047-17e4cb349d25', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1f2dff01-fe9d-46ea-af42-ec9d20c5ac2e', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1217.405147] env[62522]: DEBUG oslo.service.loopingcall [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1217.405728] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1217.405971] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a379cb0b-2ab9-4196-b368-07fb7eeb66f2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.425942] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1217.425942] env[62522]: value = "task-2416374" [ 1217.425942] env[62522]: _type = "Task" [ 1217.425942] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.433832] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416374, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.573953] env[62522]: DEBUG nova.compute.manager [req-8a16ac30-c094-4b1a-957b-15acd54d457b req-19542acc-40b4-432e-b85f-15c441215df5 service nova] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Received event network-changed-1f2dff01-fe9d-46ea-af42-ec9d20c5ac2e {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1217.574172] env[62522]: DEBUG nova.compute.manager [req-8a16ac30-c094-4b1a-957b-15acd54d457b req-19542acc-40b4-432e-b85f-15c441215df5 service nova] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Refreshing instance network info cache due to event network-changed-1f2dff01-fe9d-46ea-af42-ec9d20c5ac2e. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1217.574385] env[62522]: DEBUG oslo_concurrency.lockutils [req-8a16ac30-c094-4b1a-957b-15acd54d457b req-19542acc-40b4-432e-b85f-15c441215df5 service nova] Acquiring lock "refresh_cache-7406a1a4-a342-475b-ad02-6a29f7c487ee" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1217.574646] env[62522]: DEBUG oslo_concurrency.lockutils [req-8a16ac30-c094-4b1a-957b-15acd54d457b req-19542acc-40b4-432e-b85f-15c441215df5 service nova] Acquired lock "refresh_cache-7406a1a4-a342-475b-ad02-6a29f7c487ee" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1217.574871] env[62522]: DEBUG nova.network.neutron [req-8a16ac30-c094-4b1a-957b-15acd54d457b req-19542acc-40b4-432e-b85f-15c441215df5 service nova] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Refreshing network info cache for port 1f2dff01-fe9d-46ea-af42-ec9d20c5ac2e {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1217.634261] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2214c07d-abce-49bf-85e2-0899385f86e6 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1217.634658] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a39a400e-064f-4da8-aa2c-268b03fe2b28 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.643438] env[62522]: DEBUG oslo_vmware.api [None req-2214c07d-abce-49bf-85e2-0899385f86e6 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1217.643438] env[62522]: value = "task-2416375" [ 1217.643438] env[62522]: _type = "Task" [ 1217.643438] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.653268] env[62522]: DEBUG oslo_vmware.api [None req-2214c07d-abce-49bf-85e2-0899385f86e6 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416375, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.935527] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416374, 'name': CreateVM_Task, 'duration_secs': 0.375023} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.935706] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1217.936423] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1217.936588] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1217.936908] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1217.937180] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5cdf0d5-a4cb-48cf-9799-b3a8db21cf17 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.942121] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1217.942121] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f60980-7b2b-655c-f941-cfebb9ec2696" [ 1217.942121] env[62522]: _type = "Task" [ 1217.942121] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.949242] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f60980-7b2b-655c-f941-cfebb9ec2696, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.990605] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-eed40bb7-c3db-4459-82fc-2e43b83066c3 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Volume attach. 
Driver type: vmdk {{(pid=62522) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1217.990919] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-eed40bb7-c3db-4459-82fc-2e43b83066c3 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489859', 'volume_id': '790e2a55-e79e-4d14-9cf8-bed0cf3d0293', 'name': 'volume-790e2a55-e79e-4d14-9cf8-bed0cf3d0293', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1c6451e0-2fae-4d2b-86d7-86f9537a6259', 'attached_at': '', 'detached_at': '', 'volume_id': '790e2a55-e79e-4d14-9cf8-bed0cf3d0293', 'serial': '790e2a55-e79e-4d14-9cf8-bed0cf3d0293'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1217.991768] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3738723-18ce-41b4-b7a2-15f07f818a75 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.006810] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb5b8c33-5d0f-4eb9-92e4-cc81037ed681 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.030935] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-eed40bb7-c3db-4459-82fc-2e43b83066c3 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] volume-790e2a55-e79e-4d14-9cf8-bed0cf3d0293/volume-790e2a55-e79e-4d14-9cf8-bed0cf3d0293.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1218.031144] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81aa53bf-887f-4605-8aa5-20762c8a867c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.048246] env[62522]: DEBUG oslo_vmware.api [None req-eed40bb7-c3db-4459-82fc-2e43b83066c3 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1218.048246] env[62522]: value = "task-2416376" [ 1218.048246] env[62522]: _type = "Task" [ 1218.048246] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.055399] env[62522]: DEBUG oslo_vmware.api [None req-eed40bb7-c3db-4459-82fc-2e43b83066c3 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416376, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.152693] env[62522]: DEBUG oslo_vmware.api [None req-2214c07d-abce-49bf-85e2-0899385f86e6 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416375, 'name': PowerOffVM_Task, 'duration_secs': 0.199031} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.152961] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2214c07d-abce-49bf-85e2-0899385f86e6 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1218.153180] env[62522]: DEBUG nova.compute.manager [None req-2214c07d-abce-49bf-85e2-0899385f86e6 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1218.153988] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-074a4965-dd55-41d6-b9f3-b7f2e90cbedd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.269679] env[62522]: DEBUG nova.network.neutron [req-8a16ac30-c094-4b1a-957b-15acd54d457b req-19542acc-40b4-432e-b85f-15c441215df5 service nova] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Updated VIF entry in instance network info cache for port 1f2dff01-fe9d-46ea-af42-ec9d20c5ac2e. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1218.270055] env[62522]: DEBUG nova.network.neutron [req-8a16ac30-c094-4b1a-957b-15acd54d457b req-19542acc-40b4-432e-b85f-15c441215df5 service nova] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Updating instance_info_cache with network_info: [{"id": "1f2dff01-fe9d-46ea-af42-ec9d20c5ac2e", "address": "fa:16:3e:08:27:40", "network": {"id": "fd993a8b-571c-4873-a5d6-4d8c60e23d38", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2084093882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071dd4c295a54e388099d5bf0f4e300b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f2dff01-fe", "ovs_interfaceid": "1f2dff01-fe9d-46ea-af42-ec9d20c5ac2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1218.453014] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f60980-7b2b-655c-f941-cfebb9ec2696, 'name': SearchDatastore_Task, 'duration_secs': 0.009663} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.453373] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1218.453628] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1218.453849] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1218.453999] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1218.454198] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1218.454454] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6f1c6ad7-3f4c-4a8f-a5d2-4fa8c6f48c0b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.462701] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1218.462891] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1218.463617] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8f6c500-2cc5-4e8b-880d-d4fbfb4c00d9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.468616] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1218.468616] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5295cabd-e1e8-fa4e-0f79-40622a54b294" [ 1218.468616] env[62522]: _type = "Task" [ 1218.468616] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.476245] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5295cabd-e1e8-fa4e-0f79-40622a54b294, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.559414] env[62522]: DEBUG oslo_vmware.api [None req-eed40bb7-c3db-4459-82fc-2e43b83066c3 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416376, 'name': ReconfigVM_Task, 'duration_secs': 0.316693} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.559722] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-eed40bb7-c3db-4459-82fc-2e43b83066c3 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Reconfigured VM instance instance-00000066 to attach disk [datastore1] volume-790e2a55-e79e-4d14-9cf8-bed0cf3d0293/volume-790e2a55-e79e-4d14-9cf8-bed0cf3d0293.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1218.564788] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b82e4df-02ed-4a60-a130-0b183dccec97 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.580045] env[62522]: DEBUG oslo_vmware.api [None req-eed40bb7-c3db-4459-82fc-2e43b83066c3 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1218.580045] env[62522]: value = "task-2416377" [ 1218.580045] env[62522]: _type = "Task" [ 1218.580045] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.588207] env[62522]: DEBUG oslo_vmware.api [None req-eed40bb7-c3db-4459-82fc-2e43b83066c3 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416377, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.666773] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2214c07d-abce-49bf-85e2-0899385f86e6 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "bbb8ba81-9fed-419c-b2f9-ac5baaac3b88" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.048s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1218.773443] env[62522]: DEBUG oslo_concurrency.lockutils [req-8a16ac30-c094-4b1a-957b-15acd54d457b req-19542acc-40b4-432e-b85f-15c441215df5 service nova] Releasing lock "refresh_cache-7406a1a4-a342-475b-ad02-6a29f7c487ee" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1218.979404] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5295cabd-e1e8-fa4e-0f79-40622a54b294, 'name': SearchDatastore_Task, 'duration_secs': 0.008878} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.980560] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fe253ec-d25c-465f-8fc3-d0da3d5bbdf3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.985992] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1218.985992] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525e1e16-da1a-f367-64c5-ac5761b91684" [ 1218.985992] env[62522]: _type = "Task" [ 1218.985992] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.995286] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525e1e16-da1a-f367-64c5-ac5761b91684, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.014676] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "bbb8ba81-9fed-419c-b2f9-ac5baaac3b88" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1219.014676] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "bbb8ba81-9fed-419c-b2f9-ac5baaac3b88" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.014676] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "bbb8ba81-9fed-419c-b2f9-ac5baaac3b88-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1219.014676] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "bbb8ba81-9fed-419c-b2f9-ac5baaac3b88-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.014999] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "bbb8ba81-9fed-419c-b2f9-ac5baaac3b88-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1219.016874] env[62522]: INFO nova.compute.manager [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Terminating instance [ 1219.089290] env[62522]: DEBUG oslo_vmware.api [None req-eed40bb7-c3db-4459-82fc-2e43b83066c3 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416377, 'name': ReconfigVM_Task, 'duration_secs': 0.149387} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.089510] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-eed40bb7-c3db-4459-82fc-2e43b83066c3 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489859', 'volume_id': '790e2a55-e79e-4d14-9cf8-bed0cf3d0293', 'name': 'volume-790e2a55-e79e-4d14-9cf8-bed0cf3d0293', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1c6451e0-2fae-4d2b-86d7-86f9537a6259', 'attached_at': '', 'detached_at': '', 'volume_id': '790e2a55-e79e-4d14-9cf8-bed0cf3d0293', 'serial': '790e2a55-e79e-4d14-9cf8-bed0cf3d0293'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1219.501012] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525e1e16-da1a-f367-64c5-ac5761b91684, 'name': SearchDatastore_Task, 'duration_secs': 0.008586} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.501462] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1219.501857] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 7406a1a4-a342-475b-ad02-6a29f7c487ee/7406a1a4-a342-475b-ad02-6a29f7c487ee.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1219.502236] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d61ae837-4b34-41f7-8632-15accf03168b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.509691] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1219.509691] env[62522]: value = "task-2416379" [ 1219.509691] env[62522]: _type = "Task" [ 1219.509691] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.517246] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416379, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.520972] env[62522]: DEBUG nova.compute.manager [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1219.521110] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1219.521805] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ab5101-ea9b-429e-8599-f4e77725a213 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.528443] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1219.528660] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fdec8000-1676-4d9a-af2e-0870859cdf4b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.590209] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1219.590513] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1219.590757] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Deleting the datastore file [datastore1] bbb8ba81-9fed-419c-b2f9-ac5baaac3b88 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1219.591084] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4d47ad65-4ba0-4b74-bc58-5e64c819fe7c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.599805] env[62522]: DEBUG oslo_vmware.api [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1219.599805] env[62522]: value = "task-2416381" [ 1219.599805] env[62522]: _type = "Task" [ 1219.599805] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.607183] env[62522]: DEBUG oslo_vmware.api [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416381, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.019732] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416379, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.109922] env[62522]: DEBUG oslo_vmware.api [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416381, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.393903} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.110225] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1220.111020] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1220.111020] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1220.111020] env[62522]: INFO nova.compute.manager [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Took 0.59 seconds to destroy the instance on the hypervisor. [ 1220.111020] env[62522]: DEBUG oslo.service.loopingcall [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1220.111335] env[62522]: DEBUG nova.compute.manager [-] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1220.111440] env[62522]: DEBUG nova.network.neutron [-] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1220.123659] env[62522]: DEBUG nova.objects.instance [None req-eed40bb7-c3db-4459-82fc-2e43b83066c3 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lazy-loading 'flavor' on Instance uuid 1c6451e0-2fae-4d2b-86d7-86f9537a6259 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1220.455413] env[62522]: DEBUG nova.compute.manager [req-deec6bc4-a071-4bbc-b70d-aeaa7ba52244 req-4471be3b-31ac-43da-85bd-52a277e95c58 service nova] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Received event network-vif-deleted-dd7e0f4f-dfde-45d0-9cfc-335366201143 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1220.455413] env[62522]: INFO nova.compute.manager [req-deec6bc4-a071-4bbc-b70d-aeaa7ba52244 req-4471be3b-31ac-43da-85bd-52a277e95c58 service nova] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Neutron deleted interface dd7e0f4f-dfde-45d0-9cfc-335366201143; detaching it from the instance and deleting it from the info cache [ 1220.455413] env[62522]: DEBUG nova.network.neutron [req-deec6bc4-a071-4bbc-b70d-aeaa7ba52244 req-4471be3b-31ac-43da-85bd-52a277e95c58 service nova] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1220.520413] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416379, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.628343] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eed40bb7-c3db-4459-82fc-2e43b83066c3 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "1c6451e0-2fae-4d2b-86d7-86f9537a6259" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.232s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1220.939944] env[62522]: DEBUG nova.network.neutron [-] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1220.957993] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6a3fbe5e-e2bc-4aee-9430-eebfadbabb33 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.969141] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b638f41d-472e-425d-882c-5fb65f428efc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.006980] env[62522]: DEBUG nova.compute.manager [req-deec6bc4-a071-4bbc-b70d-aeaa7ba52244 req-4471be3b-31ac-43da-85bd-52a277e95c58 service nova] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Detach interface failed, port_id=dd7e0f4f-dfde-45d0-9cfc-335366201143, reason: Instance bbb8ba81-9fed-419c-b2f9-ac5baaac3b88 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1221.025159] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416379, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.454219} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.025563] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 7406a1a4-a342-475b-ad02-6a29f7c487ee/7406a1a4-a342-475b-ad02-6a29f7c487ee.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1221.025916] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1221.026308] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-38a11bda-f278-4980-944c-eba14e75e09f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.034298] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1221.034298] env[62522]: value = "task-2416382" [ 1221.034298] env[62522]: _type = "Task" [ 1221.034298] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.042150] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416382, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.397565] env[62522]: DEBUG nova.compute.manager [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Stashing vm_state: active {{(pid=62522) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1221.442998] env[62522]: INFO nova.compute.manager [-] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Took 1.33 seconds to deallocate network for instance. [ 1221.544500] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416382, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.444671} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.544866] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1221.545497] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e5c280e-0f8c-4787-86e0-e463524d37be {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.568301] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] 7406a1a4-a342-475b-ad02-6a29f7c487ee/7406a1a4-a342-475b-ad02-6a29f7c487ee.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1221.568577] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-896d7814-dd58-41a3-b729-3f6d300a93e3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.587766] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1221.587766] env[62522]: value = "task-2416383" [ 1221.587766] env[62522]: _type = "Task" [ 1221.587766] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.595823] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416383, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.921558] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1221.921841] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1221.949739] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1222.098357] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416383, 'name': ReconfigVM_Task, 'duration_secs': 0.299501} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.098562] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Reconfigured VM instance instance-0000006d to attach disk [datastore1] 7406a1a4-a342-475b-ad02-6a29f7c487ee/7406a1a4-a342-475b-ad02-6a29f7c487ee.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1222.099217] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-82f1813e-8944-4c9b-a7ea-17693b0275ce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.105683] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1222.105683] env[62522]: value = "task-2416385" [ 1222.105683] env[62522]: _type = "Task" [ 1222.105683] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.113538] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416385, 'name': Rename_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.427304] env[62522]: INFO nova.compute.claims [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1222.615765] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416385, 'name': Rename_Task, 'duration_secs': 0.138552} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.616142] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1222.616234] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-63e7c384-f208-4a4b-a35e-751103bfea25 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.623054] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1222.623054] env[62522]: value = "task-2416386" [ 1222.623054] env[62522]: _type = "Task" [ 1222.623054] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.634519] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416386, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.933640] env[62522]: INFO nova.compute.resource_tracker [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Updating resource usage from migration ae46d283-bdf8-472c-bbc7-23e6b987e02b [ 1223.028618] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beea5eb7-8a7a-4996-86ef-296c4b06469b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.036164] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f228917-cac0-4be2-a189-d2b13377d075 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.065574] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60463d77-4c74-4a21-a385-a94870551f41 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.072767] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37ee1f7-089e-439b-9efe-49a864f860ec {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.086867] env[62522]: DEBUG nova.compute.provider_tree [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1223.132713] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416386, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.590122] env[62522]: DEBUG nova.scheduler.client.report [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1223.633752] env[62522]: DEBUG oslo_vmware.api [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416386, 'name': PowerOnVM_Task, 'duration_secs': 0.98913} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.634066] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1223.634253] env[62522]: INFO nova.compute.manager [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Took 8.64 seconds to spawn the instance on the hypervisor. [ 1223.634441] env[62522]: DEBUG nova.compute.manager [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1223.635264] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b2e363-2e31-463e-9ee6-d765675d7396 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.095987] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.174s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1224.096285] env[62522]: INFO nova.compute.manager [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Migrating [ 1224.103509] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.154s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1224.103769] env[62522]: DEBUG nova.objects.instance [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lazy-loading 'resources' on Instance uuid bbb8ba81-9fed-419c-b2f9-ac5baaac3b88 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1224.150805] env[62522]: INFO nova.compute.manager [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Took 13.39 seconds to build instance. 
[ 1224.616897] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "refresh_cache-1c6451e0-2fae-4d2b-86d7-86f9537a6259" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1224.617108] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquired lock "refresh_cache-1c6451e0-2fae-4d2b-86d7-86f9537a6259" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1224.617388] env[62522]: DEBUG nova.network.neutron [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1224.654197] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9ae0c92a-1112-4203-a548-ca49a31c858a tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "7406a1a4-a342-475b-ad02-6a29f7c487ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.899s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1224.654766] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "7406a1a4-a342-475b-ad02-6a29f7c487ee" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 11.797s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1224.658553] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f6a4636-2499-4a62-bc21-b20b99f584da {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.735806] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee1c15ff-5467-4056-8dfb-a8baa2f19734 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.744255] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb8c4ec-a7cb-4e99-aa7e-d520f4a6af7f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.777954] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0ea439e-2d46-479c-8ccf-56c110ab1865 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.785582] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc3f106-a352-4b89-8ac2-27b613f8f7e9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.801293] env[62522]: DEBUG nova.compute.provider_tree [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 
tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1225.168815] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "7406a1a4-a342-475b-ad02-6a29f7c487ee" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.514s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1225.254953] env[62522]: DEBUG nova.compute.manager [req-28fa1d2d-11ba-4332-b1fb-50a4e6f3a211 req-d966f56a-45d7-477c-8397-e331950f5592 service nova] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Received event network-changed-1f2dff01-fe9d-46ea-af42-ec9d20c5ac2e {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1225.254953] env[62522]: DEBUG nova.compute.manager [req-28fa1d2d-11ba-4332-b1fb-50a4e6f3a211 req-d966f56a-45d7-477c-8397-e331950f5592 service nova] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Refreshing instance network info cache due to event network-changed-1f2dff01-fe9d-46ea-af42-ec9d20c5ac2e. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1225.254953] env[62522]: DEBUG oslo_concurrency.lockutils [req-28fa1d2d-11ba-4332-b1fb-50a4e6f3a211 req-d966f56a-45d7-477c-8397-e331950f5592 service nova] Acquiring lock "refresh_cache-7406a1a4-a342-475b-ad02-6a29f7c487ee" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1225.255112] env[62522]: DEBUG oslo_concurrency.lockutils [req-28fa1d2d-11ba-4332-b1fb-50a4e6f3a211 req-d966f56a-45d7-477c-8397-e331950f5592 service nova] Acquired lock "refresh_cache-7406a1a4-a342-475b-ad02-6a29f7c487ee" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1225.255491] env[62522]: DEBUG nova.network.neutron [req-28fa1d2d-11ba-4332-b1fb-50a4e6f3a211 req-d966f56a-45d7-477c-8397-e331950f5592 service nova] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Refreshing network info cache for port 1f2dff01-fe9d-46ea-af42-ec9d20c5ac2e {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1225.304991] env[62522]: DEBUG nova.scheduler.client.report [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1225.409049] env[62522]: DEBUG nova.network.neutron [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Updating instance_info_cache with network_info: [{"id": "66b8c64e-5981-4cc9-b51a-df5bce03233c", "address": "fa:16:3e:ab:2f:6d", "network": {"id": 
"2a4f6f01-ad28-4f8f-b835-ea86e1791f49", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1577320995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82346c440c3343a0a5c233a48203a13c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66b8c64e-59", "ovs_interfaceid": "66b8c64e-5981-4cc9-b51a-df5bce03233c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1225.813456] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.709s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1225.841465] env[62522]: INFO nova.scheduler.client.report [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Deleted allocations for instance bbb8ba81-9fed-419c-b2f9-ac5baaac3b88 [ 1225.911640] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Releasing lock "refresh_cache-1c6451e0-2fae-4d2b-86d7-86f9537a6259" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1226.054221] env[62522]: DEBUG nova.network.neutron [req-28fa1d2d-11ba-4332-b1fb-50a4e6f3a211 req-d966f56a-45d7-477c-8397-e331950f5592 service nova] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Updated VIF entry in instance network info cache for port 1f2dff01-fe9d-46ea-af42-ec9d20c5ac2e. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1226.054601] env[62522]: DEBUG nova.network.neutron [req-28fa1d2d-11ba-4332-b1fb-50a4e6f3a211 req-d966f56a-45d7-477c-8397-e331950f5592 service nova] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Updating instance_info_cache with network_info: [{"id": "1f2dff01-fe9d-46ea-af42-ec9d20c5ac2e", "address": "fa:16:3e:08:27:40", "network": {"id": "fd993a8b-571c-4873-a5d6-4d8c60e23d38", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2084093882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.178", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071dd4c295a54e388099d5bf0f4e300b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f2dff01-fe", "ovs_interfaceid": "1f2dff01-fe9d-46ea-af42-ec9d20c5ac2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1226.349560] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e83c2c23-98dd-4fea-8f2b-97a6358bd87b tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "bbb8ba81-9fed-419c-b2f9-ac5baaac3b88" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.335s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.557433] env[62522]: DEBUG oslo_concurrency.lockutils [req-28fa1d2d-11ba-4332-b1fb-50a4e6f3a211 req-d966f56a-45d7-477c-8397-e331950f5592 service nova] Releasing lock "refresh_cache-7406a1a4-a342-475b-ad02-6a29f7c487ee" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1227.130473] env[62522]: DEBUG oslo_concurrency.lockutils [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "0ba51973-2ffe-460c-a4e2-c9e2a2b768b1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1227.130766] env[62522]: DEBUG oslo_concurrency.lockutils [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "0ba51973-2ffe-460c-a4e2-c9e2a2b768b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.425992] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3599f766-c9f6-431c-9300-739361c2fb7f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.446928] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Updating instance '1c6451e0-2fae-4d2b-86d7-86f9537a6259' progress to 0 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1227.633160] env[62522]: DEBUG nova.compute.manager [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1227.774117] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1227.952964] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1227.953291] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-16e475c6-ebbb-4fb7-a0f9-a185f22c6b71 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.961069] env[62522]: DEBUG oslo_vmware.api [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1227.961069] env[62522]: value = "task-2416387" [ 1227.961069] env[62522]: _type = "Task" [ 1227.961069] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.968559] env[62522]: DEBUG oslo_vmware.api [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416387, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.156868] env[62522]: DEBUG oslo_concurrency.lockutils [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1228.157191] env[62522]: DEBUG oslo_concurrency.lockutils [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1228.159027] env[62522]: INFO nova.compute.claims [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1228.470830] env[62522]: DEBUG oslo_vmware.api [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416387, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.973053] env[62522]: DEBUG oslo_vmware.api [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416387, 'name': PowerOffVM_Task, 'duration_secs': 0.617988} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.973053] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1228.973053] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Updating instance '1c6451e0-2fae-4d2b-86d7-86f9537a6259' progress to 17 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1229.246083] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1229.263145] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d01b30ef-b73a-4d15-91a9-d7041b5f1980 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.272707] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b58297-12f5-4382-ae5b-d2394bdf00de {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.301996] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f0e36a-6112-4178-9e50-e09232e7ed95 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.308956] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d048744e-9290-4def-998c-a6f749ab5c41 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.322044] env[62522]: DEBUG nova.compute.provider_tree [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1229.479492] env[62522]: DEBUG nova.virt.hardware [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:04Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:567}} [ 1229.479704] env[62522]: DEBUG nova.virt.hardware [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1229.479838] env[62522]: DEBUG nova.virt.hardware [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1229.480035] env[62522]: DEBUG nova.virt.hardware [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1229.480193] env[62522]: DEBUG nova.virt.hardware [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1229.480344] env[62522]: DEBUG nova.virt.hardware [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1229.480548] env[62522]: DEBUG nova.virt.hardware [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1229.480756] env[62522]: DEBUG nova.virt.hardware [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1229.480882] env[62522]: DEBUG nova.virt.hardware [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1229.481057] env[62522]: DEBUG nova.virt.hardware [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1229.481236] env[62522]: DEBUG nova.virt.hardware [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1229.486176] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-9f921d05-3892-4a66-907a-44ec8e0af2d0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.502312] env[62522]: DEBUG oslo_vmware.api [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1229.502312] env[62522]: value = "task-2416388" [ 1229.502312] env[62522]: _type = "Task" [ 1229.502312] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.512600] env[62522]: DEBUG oslo_vmware.api [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416388, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.749620] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1229.825185] env[62522]: DEBUG nova.scheduler.client.report [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1230.012133] env[62522]: DEBUG oslo_vmware.api [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416388, 'name': ReconfigVM_Task, 'duration_secs': 0.172927} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.012395] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Updating instance '1c6451e0-2fae-4d2b-86d7-86f9537a6259' progress to 33 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1230.331047] env[62522]: DEBUG oslo_concurrency.lockutils [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.173s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.331047] env[62522]: DEBUG nova.compute.manager [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1230.333847] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.584s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1230.334081] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.334204] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62522) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1230.335471] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c27eebd-2f99-4f81-b3e0-b07e7652078b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.343843] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c5002c-0e4e-4060-9609-f9991262cb64 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.358775] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd49a1c-391c-4238-a2cc-b2bd5ed6b1ab {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.364935] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b1043f9-de32-4d58-a9a4-68c7bc98f208 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.394610] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 
None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180012MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=62522) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1230.394746] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1230.394950] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1230.519548] env[62522]: DEBUG nova.virt.hardware [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1230.519845] env[62522]: DEBUG nova.virt.hardware [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1230.520021] env[62522]: DEBUG nova.virt.hardware [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1230.520210] env[62522]: DEBUG nova.virt.hardware [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1230.520358] env[62522]: DEBUG nova.virt.hardware [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1230.520504] env[62522]: DEBUG nova.virt.hardware [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1230.520706] env[62522]: DEBUG nova.virt.hardware [None 
req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1230.520865] env[62522]: DEBUG nova.virt.hardware [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1230.521041] env[62522]: DEBUG nova.virt.hardware [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1230.521207] env[62522]: DEBUG nova.virt.hardware [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1230.521378] env[62522]: DEBUG nova.virt.hardware [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1230.526628] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Reconfiguring VM instance instance-00000066 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1230.526892] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04136b3a-087c-4dbc-ad5a-4faa1ff869a5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.546048] env[62522]: DEBUG oslo_vmware.api [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1230.546048] env[62522]: value = "task-2416389" [ 1230.546048] env[62522]: _type = "Task" [ 1230.546048] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.553757] env[62522]: DEBUG oslo_vmware.api [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416389, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.840543] env[62522]: DEBUG nova.compute.utils [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1230.841984] env[62522]: DEBUG nova.compute.manager [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1230.842174] env[62522]: DEBUG nova.network.neutron [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1230.890533] env[62522]: DEBUG nova.policy [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f26eeb125397426baca60d80d635c4b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a06421250694a98b13ff34ad816dc75', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1231.055889] env[62522]: DEBUG oslo_vmware.api [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416389, 'name': ReconfigVM_Task, 'duration_secs': 0.465969} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.056573] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Reconfigured VM instance instance-00000066 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1231.057359] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-185945e6-25d2-4631-9c43-fe47f802b55d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.082443] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 1c6451e0-2fae-4d2b-86d7-86f9537a6259/1c6451e0-2fae-4d2b-86d7-86f9537a6259.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1231.082747] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77a79d24-5930-4a32-b45b-2f4e044e92b6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.100330] env[62522]: DEBUG oslo_vmware.api [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1231.100330] env[62522]: value = "task-2416390" [ 1231.100330] env[62522]: _type = "Task" [ 1231.100330] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.108348] env[62522]: DEBUG oslo_vmware.api [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416390, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.129034] env[62522]: DEBUG nova.network.neutron [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Successfully created port: 99aa2548-94d3-41e4-8cad-1043809dc3df {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1231.346437] env[62522]: DEBUG nova.compute.manager [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1231.403099] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Applying migration context for instance 1c6451e0-2fae-4d2b-86d7-86f9537a6259 as it has an incoming, in-progress migration ae46d283-bdf8-472c-bbc7-23e6b987e02b. 
Migration status is migrating {{(pid=62522) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1231.403964] env[62522]: INFO nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Updating resource usage from migration ae46d283-bdf8-472c-bbc7-23e6b987e02b [ 1231.420103] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance b31195c2-29f4-475c-baa7-fcb4791b7278 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1231.420263] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance cb7a19f1-6093-47ee-bbbc-a75dd5423f32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1231.420388] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance a4cb5c19-9087-4354-9689-a99ae8924dc1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1231.420507] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 27f4b976-7dff-49b0-9b00-7515cb976e72 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1231.420622] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 7406a1a4-a342-475b-ad02-6a29f7c487ee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1231.420738] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Migration ae46d283-bdf8-472c-bbc7-23e6b987e02b is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1231.420850] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 1c6451e0-2fae-4d2b-86d7-86f9537a6259 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1231.420962] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1231.421160] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1231.421295] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2112MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1231.523454] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f480b56-d5e0-4d5f-8732-a8b144e7f2c5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.530752] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ea53be-5567-4912-b875-a23f37261700 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.561619] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f148345-e84e-43c8-923c-306acc773bb8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.569300] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9987eb8e-c9cf-4a93-88ca-9e24432bc28c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.582214] env[62522]: DEBUG nova.compute.provider_tree [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1231.609348] env[62522]: DEBUG oslo_vmware.api [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416390, 'name': ReconfigVM_Task, 'duration_secs': 0.451899} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.609688] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 1c6451e0-2fae-4d2b-86d7-86f9537a6259/1c6451e0-2fae-4d2b-86d7-86f9537a6259.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1231.610046] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Updating instance '1c6451e0-2fae-4d2b-86d7-86f9537a6259' progress to 50 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1232.085834] env[62522]: DEBUG nova.scheduler.client.report [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1232.116443] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c807fa70-67a3-40f5-a7c4-3640779c1ac7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.138745] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0102e934-3242-4df4-8dbb-33427b37c56d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.158325] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Updating instance '1c6451e0-2fae-4d2b-86d7-86f9537a6259' progress to 67 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1232.358175] env[62522]: DEBUG nova.compute.manager [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1232.384383] env[62522]: DEBUG nova.virt.hardware [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1232.384636] env[62522]: DEBUG nova.virt.hardware [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1232.384799] env[62522]: DEBUG nova.virt.hardware [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1232.384980] env[62522]: DEBUG nova.virt.hardware [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1232.385140] env[62522]: DEBUG nova.virt.hardware [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1232.385292] env[62522]: DEBUG nova.virt.hardware [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1232.385505] env[62522]: DEBUG nova.virt.hardware [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1232.385664] env[62522]: DEBUG nova.virt.hardware [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1232.385833] env[62522]: DEBUG nova.virt.hardware [None 
req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1232.385994] env[62522]: DEBUG nova.virt.hardware [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1232.386340] env[62522]: DEBUG nova.virt.hardware [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1232.387257] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4fa3419-87c5-4334-a119-e467b634eaa8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.395079] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1aea847-904d-489d-bb9b-980be36378cc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.498579] env[62522]: DEBUG nova.compute.manager [req-01b230ec-0fb6-4902-bea9-501c9a96a09f req-3413e343-4e84-40a0-8c67-74ecb8b5adce service nova] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Received event network-vif-plugged-99aa2548-94d3-41e4-8cad-1043809dc3df {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1232.499062] env[62522]: DEBUG oslo_concurrency.lockutils [req-01b230ec-0fb6-4902-bea9-501c9a96a09f req-3413e343-4e84-40a0-8c67-74ecb8b5adce service nova] Acquiring lock "0ba51973-2ffe-460c-a4e2-c9e2a2b768b1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1232.499255] env[62522]: DEBUG oslo_concurrency.lockutils [req-01b230ec-0fb6-4902-bea9-501c9a96a09f req-3413e343-4e84-40a0-8c67-74ecb8b5adce service nova] Lock "0ba51973-2ffe-460c-a4e2-c9e2a2b768b1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1232.499439] env[62522]: DEBUG oslo_concurrency.lockutils [req-01b230ec-0fb6-4902-bea9-501c9a96a09f req-3413e343-4e84-40a0-8c67-74ecb8b5adce service nova] Lock "0ba51973-2ffe-460c-a4e2-c9e2a2b768b1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1232.499617] env[62522]: DEBUG nova.compute.manager [req-01b230ec-0fb6-4902-bea9-501c9a96a09f req-3413e343-4e84-40a0-8c67-74ecb8b5adce service nova] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] No waiting events found dispatching network-vif-plugged-99aa2548-94d3-41e4-8cad-1043809dc3df {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1232.499791] env[62522]: WARNING nova.compute.manager [req-01b230ec-0fb6-4902-bea9-501c9a96a09f 
req-3413e343-4e84-40a0-8c67-74ecb8b5adce service nova] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Received unexpected event network-vif-plugged-99aa2548-94d3-41e4-8cad-1043809dc3df for instance with vm_state building and task_state spawning. [ 1232.590965] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62522) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1232.591190] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.196s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1232.613813] env[62522]: DEBUG nova.network.neutron [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Successfully updated port: 99aa2548-94d3-41e4-8cad-1043809dc3df {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1233.116060] env[62522]: DEBUG oslo_concurrency.lockutils [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "refresh_cache-0ba51973-2ffe-460c-a4e2-c9e2a2b768b1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1233.116213] env[62522]: DEBUG oslo_concurrency.lockutils [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired lock "refresh_cache-0ba51973-2ffe-460c-a4e2-c9e2a2b768b1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1233.116354] env[62522]: DEBUG nova.network.neutron [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1233.587455] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1233.587847] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1233.587945] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Starting heal instance info cache {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1233.647071] env[62522]: DEBUG nova.network.neutron [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1233.816775] env[62522]: DEBUG nova.network.neutron [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Port 66b8c64e-5981-4cc9-b51a-df5bce03233c binding to destination host cpu-1 is already ACTIVE {{(pid=62522) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1233.839119] env[62522]: DEBUG nova.network.neutron [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Updating instance_info_cache with network_info: [{"id": "99aa2548-94d3-41e4-8cad-1043809dc3df", "address": "fa:16:3e:ce:57:e7", "network": {"id": "21d0ca22-5509-4f9b-b167-f9fde2dac808", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-547933771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a06421250694a98b13ff34ad816dc75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99aa2548-94", "ovs_interfaceid": "99aa2548-94d3-41e4-8cad-1043809dc3df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1234.341305] env[62522]: DEBUG oslo_concurrency.lockutils [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Releasing lock "refresh_cache-0ba51973-2ffe-460c-a4e2-c9e2a2b768b1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1234.342246] env[62522]: DEBUG nova.compute.manager [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Instance network_info: |[{"id": "99aa2548-94d3-41e4-8cad-1043809dc3df", "address": "fa:16:3e:ce:57:e7", "network": {"id": "21d0ca22-5509-4f9b-b167-f9fde2dac808", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-547933771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a06421250694a98b13ff34ad816dc75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", 
"external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99aa2548-94", "ovs_interfaceid": "99aa2548-94d3-41e4-8cad-1043809dc3df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1234.342246] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ce:57:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24144f5a-050a-4f1e-8d8c-774dc16dc791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '99aa2548-94d3-41e4-8cad-1043809dc3df', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1234.349483] env[62522]: DEBUG oslo.service.loopingcall [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1234.349684] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1234.349901] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f79b1af1-f12e-4fcc-905f-f30f26c4e945 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.370763] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1234.370763] env[62522]: value = "task-2416391" [ 1234.370763] env[62522]: _type = "Task" [ 1234.370763] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.379177] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416391, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.526999] env[62522]: DEBUG nova.compute.manager [req-d81d2bf4-ed74-4dfe-93fe-7006d6ee0da1 req-0c598299-efce-469d-b990-a9f763c4b1b1 service nova] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Received event network-changed-99aa2548-94d3-41e4-8cad-1043809dc3df {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1234.527234] env[62522]: DEBUG nova.compute.manager [req-d81d2bf4-ed74-4dfe-93fe-7006d6ee0da1 req-0c598299-efce-469d-b990-a9f763c4b1b1 service nova] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Refreshing instance network info cache due to event network-changed-99aa2548-94d3-41e4-8cad-1043809dc3df. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1234.527450] env[62522]: DEBUG oslo_concurrency.lockutils [req-d81d2bf4-ed74-4dfe-93fe-7006d6ee0da1 req-0c598299-efce-469d-b990-a9f763c4b1b1 service nova] Acquiring lock "refresh_cache-0ba51973-2ffe-460c-a4e2-c9e2a2b768b1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1234.527592] env[62522]: DEBUG oslo_concurrency.lockutils [req-d81d2bf4-ed74-4dfe-93fe-7006d6ee0da1 req-0c598299-efce-469d-b990-a9f763c4b1b1 service nova] Acquired lock "refresh_cache-0ba51973-2ffe-460c-a4e2-c9e2a2b768b1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1234.527755] env[62522]: DEBUG nova.network.neutron [req-d81d2bf4-ed74-4dfe-93fe-7006d6ee0da1 req-0c598299-efce-469d-b990-a9f763c4b1b1 service nova] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Refreshing network info cache for port 99aa2548-94d3-41e4-8cad-1043809dc3df {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1234.835397] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "1c6451e0-2fae-4d2b-86d7-86f9537a6259-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.835720] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "1c6451e0-2fae-4d2b-86d7-86f9537a6259-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1234.835835] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "1c6451e0-2fae-4d2b-86d7-86f9537a6259-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1234.880611] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416391, 'name': CreateVM_Task, 'duration_secs': 0.299111} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.880777] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1234.891997] env[62522]: DEBUG oslo_concurrency.lockutils [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1234.892187] env[62522]: DEBUG oslo_concurrency.lockutils [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1234.892491] env[62522]: DEBUG oslo_concurrency.lockutils [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1234.892740] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb190444-198b-481e-af2e-3c503fe226f8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.897896] env[62522]: DEBUG oslo_vmware.api [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1234.897896] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e95fbf-5a0c-73b9-c764-92cc2133de98" [ 1234.897896] env[62522]: _type = "Task" [ 1234.897896] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.905462] env[62522]: DEBUG oslo_vmware.api [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e95fbf-5a0c-73b9-c764-92cc2133de98, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.139495] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "refresh_cache-b31195c2-29f4-475c-baa7-fcb4791b7278" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1235.139641] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquired lock "refresh_cache-b31195c2-29f4-475c-baa7-fcb4791b7278" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.139797] env[62522]: DEBUG nova.network.neutron [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Forcefully refreshing network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1235.278295] env[62522]: DEBUG nova.network.neutron [req-d81d2bf4-ed74-4dfe-93fe-7006d6ee0da1 req-0c598299-efce-469d-b990-a9f763c4b1b1 service nova] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Updated VIF entry in instance network info cache for port 99aa2548-94d3-41e4-8cad-1043809dc3df. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1235.278668] env[62522]: DEBUG nova.network.neutron [req-d81d2bf4-ed74-4dfe-93fe-7006d6ee0da1 req-0c598299-efce-469d-b990-a9f763c4b1b1 service nova] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Updating instance_info_cache with network_info: [{"id": "99aa2548-94d3-41e4-8cad-1043809dc3df", "address": "fa:16:3e:ce:57:e7", "network": {"id": "21d0ca22-5509-4f9b-b167-f9fde2dac808", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-547933771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a06421250694a98b13ff34ad816dc75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99aa2548-94", "ovs_interfaceid": "99aa2548-94d3-41e4-8cad-1043809dc3df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1235.408413] env[62522]: DEBUG oslo_vmware.api [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e95fbf-5a0c-73b9-c764-92cc2133de98, 'name': SearchDatastore_Task, 'duration_secs': 0.009433} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.408707] env[62522]: DEBUG oslo_concurrency.lockutils [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1235.408934] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1235.409181] env[62522]: DEBUG oslo_concurrency.lockutils [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1235.409325] env[62522]: DEBUG oslo_concurrency.lockutils [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.409508] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1235.409780] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5194603e-c007-4d8a-a359-a6d87467d78e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.418072] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1235.418245] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1235.419048] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5377a6c8-426f-4fc3-a28a-270c3be68d95 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.423784] env[62522]: DEBUG oslo_vmware.api [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1235.423784] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e1cd67-36e7-5383-0dbb-cbb86b4f4f46" [ 1235.423784] env[62522]: _type = "Task" [ 1235.423784] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.430790] env[62522]: DEBUG oslo_vmware.api [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e1cd67-36e7-5383-0dbb-cbb86b4f4f46, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.781796] env[62522]: DEBUG oslo_concurrency.lockutils [req-d81d2bf4-ed74-4dfe-93fe-7006d6ee0da1 req-0c598299-efce-469d-b990-a9f763c4b1b1 service nova] Releasing lock "refresh_cache-0ba51973-2ffe-460c-a4e2-c9e2a2b768b1" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1235.872921] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "refresh_cache-1c6451e0-2fae-4d2b-86d7-86f9537a6259" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1235.873162] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquired lock "refresh_cache-1c6451e0-2fae-4d2b-86d7-86f9537a6259" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.873317] env[62522]: DEBUG nova.network.neutron [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1235.935895] env[62522]: DEBUG oslo_vmware.api [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e1cd67-36e7-5383-0dbb-cbb86b4f4f46, 'name': SearchDatastore_Task, 'duration_secs': 0.007476} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.936944] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-126b76e0-a72f-4023-b254-73fd1b8e2538 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.942297] env[62522]: DEBUG oslo_vmware.api [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1235.942297] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f27edd-cf8e-52d0-3aea-b1cb07ba18e9" [ 1235.942297] env[62522]: _type = "Task" [ 1235.942297] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.951717] env[62522]: DEBUG oslo_vmware.api [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f27edd-cf8e-52d0-3aea-b1cb07ba18e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.451883] env[62522]: DEBUG oslo_vmware.api [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f27edd-cf8e-52d0-3aea-b1cb07ba18e9, 'name': SearchDatastore_Task, 'duration_secs': 0.009644} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.452458] env[62522]: DEBUG oslo_concurrency.lockutils [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1236.452706] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1/0ba51973-2ffe-460c-a4e2-c9e2a2b768b1.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1236.452959] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7ec457d7-b4e4-4e5d-b2ec-97fb57fe9309 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.460683] env[62522]: DEBUG oslo_vmware.api [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1236.460683] env[62522]: value = "task-2416392" [ 1236.460683] env[62522]: _type = "Task" [ 1236.460683] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.467947] env[62522]: DEBUG oslo_vmware.api [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416392, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.519885] env[62522]: DEBUG nova.network.neutron [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Updating instance_info_cache with network_info: [{"id": "58444651-b47b-44d5-b240-53949c79df86", "address": "fa:16:3e:6c:81:68", "network": {"id": "fd993a8b-571c-4873-a5d6-4d8c60e23d38", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2084093882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071dd4c295a54e388099d5bf0f4e300b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58444651-b4", "ovs_interfaceid": "58444651-b47b-44d5-b240-53949c79df86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.654437] env[62522]: DEBUG nova.network.neutron [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Updating instance_info_cache with network_info: [{"id": "66b8c64e-5981-4cc9-b51a-df5bce03233c", "address": "fa:16:3e:ab:2f:6d", "network": {"id": "2a4f6f01-ad28-4f8f-b835-ea86e1791f49", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1577320995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82346c440c3343a0a5c233a48203a13c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66b8c64e-59", "ovs_interfaceid": "66b8c64e-5981-4cc9-b51a-df5bce03233c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.684040] env[62522]: DEBUG oslo_concurrency.lockutils [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1236.684322] env[62522]: DEBUG oslo_concurrency.lockutils [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1236.684513] env[62522]: INFO nova.compute.manager [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Shelving [ 1236.970418] env[62522]: DEBUG oslo_vmware.api [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416392, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.425171} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.970779] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1/0ba51973-2ffe-460c-a4e2-c9e2a2b768b1.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1236.970849] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1236.971070] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e33ab07-8f17-4786-9fe6-6087eb2a58ea {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.977455] env[62522]: DEBUG oslo_vmware.api [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1236.977455] env[62522]: value = "task-2416393" [ 1236.977455] env[62522]: _type = "Task" [ 1236.977455] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.984739] env[62522]: DEBUG oslo_vmware.api [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416393, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.022453] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Releasing lock "refresh_cache-b31195c2-29f4-475c-baa7-fcb4791b7278" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1237.022667] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Updated the network info_cache for instance {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 1237.022872] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1237.023045] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1237.023198] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1237.023344] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1237.023493] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1237.023623] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62522) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1237.157050] env[62522]: DEBUG oslo_concurrency.lockutils [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Releasing lock "refresh_cache-1c6451e0-2fae-4d2b-86d7-86f9537a6259" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1237.315278] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a56d750-2091-40c4-ad97-ff0b121e3a30 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "a4cb5c19-9087-4354-9689-a99ae8924dc1" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1237.315512] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a56d750-2091-40c4-ad97-ff0b121e3a30 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "a4cb5c19-9087-4354-9689-a99ae8924dc1" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1237.486806] env[62522]: DEBUG oslo_vmware.api [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416393, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08832} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.487058] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1237.487830] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c915cc2-f225-4d4a-b8a0-1253699dc500 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.509249] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1/0ba51973-2ffe-460c-a4e2-c9e2a2b768b1.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1237.509508] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cbf100d2-420f-410a-824b-3e3bb2057de7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.528529] env[62522]: DEBUG oslo_vmware.api [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1237.528529] env[62522]: value = "task-2416394" [ 1237.528529] env[62522]: _type = "Task" [ 1237.528529] env[62522]: 
} to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.538586] env[62522]: DEBUG oslo_vmware.api [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416394, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.665958] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-868454c2-e994-4fa8-bf6d-edcb8183486e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.673427] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5dcb7ee-b4a4-449e-b4b1-07ca978cf827 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.692533] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1237.692790] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6bc87911-a464-47c2-ad99-e8afbd47ffe4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.699627] env[62522]: DEBUG oslo_vmware.api [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1237.699627] env[62522]: value = "task-2416395" [ 1237.699627] env[62522]: _type = "Task" [ 1237.699627] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.707798] env[62522]: DEBUG oslo_vmware.api [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416395, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.819572] env[62522]: DEBUG nova.compute.utils [None req-2a56d750-2091-40c4-ad97-ff0b121e3a30 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1238.038292] env[62522]: DEBUG oslo_vmware.api [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416394, 'name': ReconfigVM_Task, 'duration_secs': 0.312443} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.038656] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Reconfigured VM instance instance-0000006e to attach disk [datastore1] 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1/0ba51973-2ffe-460c-a4e2-c9e2a2b768b1.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1238.039116] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd6e78de-fc56-48d4-afbc-25693a3f421a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.045740] env[62522]: DEBUG oslo_vmware.api [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1238.045740] env[62522]: value = "task-2416396" [ 1238.045740] env[62522]: _type = "Task" [ 1238.045740] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.053764] env[62522]: DEBUG oslo_vmware.api [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416396, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.210152] env[62522]: DEBUG oslo_vmware.api [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416395, 'name': PowerOffVM_Task, 'duration_secs': 0.183895} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.210649] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1238.211317] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2658508f-148c-4780-9fbb-304260b626ff {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.877149] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a56d750-2091-40c4-ad97-ff0b121e3a30 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "a4cb5c19-9087-4354-9689-a99ae8924dc1" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.561s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1238.899294] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5a79a4a-0cba-42be-b97e-5b693ae2b460 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.901771] env[62522]: DEBUG oslo_vmware.api [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416396, 'name': Rename_Task, 'duration_secs': 0.139839} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.903653] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1238.904378] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6fb68b47-5808-468f-b273-9bf214c76317 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.914201] env[62522]: DEBUG oslo_vmware.api [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1238.914201] env[62522]: value = "task-2416397" [ 1238.914201] env[62522]: _type = "Task" [ 1238.914201] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.921765] env[62522]: DEBUG oslo_vmware.api [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416397, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.410748] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Creating Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1239.411050] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1af63058-582e-4ef1-9bd2-972dc384ba7d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.419919] env[62522]: DEBUG oslo_vmware.api [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1239.419919] env[62522]: value = "task-2416398" [ 1239.419919] env[62522]: _type = "Task" [ 1239.419919] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.427065] env[62522]: DEBUG oslo_vmware.api [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416397, 'name': PowerOnVM_Task, 'duration_secs': 0.476396} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.427739] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5df0df98-53a1-49ee-955e-9a22f5f8fc7d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.430370] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1239.430609] env[62522]: INFO nova.compute.manager [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Took 7.07 seconds to spawn the instance on the hypervisor. [ 1239.430802] env[62522]: DEBUG nova.compute.manager [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1239.431502] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea5a6b6-7204-4fd7-bbb7-a7e2a9d6c143 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.437275] env[62522]: DEBUG oslo_vmware.api [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416398, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.454232] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08aa28fb-f992-4c01-aed7-38a0a9e45300 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.464592] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Updating instance '1c6451e0-2fae-4d2b-86d7-86f9537a6259' progress to 83 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1239.890368] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a56d750-2091-40c4-ad97-ff0b121e3a30 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "a4cb5c19-9087-4354-9689-a99ae8924dc1" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1239.890646] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a56d750-2091-40c4-ad97-ff0b121e3a30 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "a4cb5c19-9087-4354-9689-a99ae8924dc1" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1239.890894] env[62522]: INFO nova.compute.manager [None req-2a56d750-2091-40c4-ad97-ff0b121e3a30 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Attaching volume 792bdc79-fff9-48a5-b954-77d64857a962 to /dev/sdb [ 1239.925694] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc8ccba2-7c34-4a54-a7eb-cab846c298ac {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.935724] env[62522]: DEBUG oslo_vmware.api [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416398, 'name': CreateSnapshot_Task, 'duration_secs': 0.457564} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.936165] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Created Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1239.936914] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-732b946f-afe1-405b-adc7-18e0a1d5f56f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.941735] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f0085d-e0d5-4f94-9bd2-55d8abd26653 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.960622] env[62522]: DEBUG nova.virt.block_device [None req-2a56d750-2091-40c4-ad97-ff0b121e3a30 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Updating existing volume attachment record: 65b123ad-52cd-43df-b638-58d507c2d569 {{(pid=62522) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1239.970518] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1239.970518] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-55c24bf6-373e-49df-afa9-7df8640dbf39 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.972907] env[62522]: INFO nova.compute.manager [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Took 11.83 seconds to build instance. [ 1239.980267] env[62522]: DEBUG oslo_vmware.api [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1239.980267] env[62522]: value = "task-2416399" [ 1239.980267] env[62522]: _type = "Task" [ 1239.980267] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.989671] env[62522]: DEBUG oslo_vmware.api [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416399, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.439614] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05a8ad34-b565-40aa-ba27-aa2c6f6829e3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.446324] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-effb176a-b3b6-4285-a4ab-f148275bd685 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Suspending the VM {{(pid=62522) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1240.446582] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-80eaf469-2a11-4af2-b514-1ced997b9e9c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.458017] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Creating linked-clone VM from snapshot {{(pid=62522) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1240.459691] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-dff7f094-4be9-43bc-be4f-8359c42708a5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.462783] env[62522]: DEBUG oslo_vmware.api [None req-effb176a-b3b6-4285-a4ab-f148275bd685 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1240.462783] env[62522]: value = "task-2416403" [ 1240.462783] env[62522]: _type = "Task" [ 1240.462783] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.468373] env[62522]: DEBUG oslo_vmware.api [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1240.468373] env[62522]: value = "task-2416404" [ 1240.468373] env[62522]: _type = "Task" [ 1240.468373] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.474600] env[62522]: DEBUG oslo_vmware.api [None req-effb176a-b3b6-4285-a4ab-f148275bd685 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416403, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.475282] env[62522]: DEBUG oslo_concurrency.lockutils [None req-17e2893d-7568-49c4-ae9e-aed544ed7672 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "0ba51973-2ffe-460c-a4e2-c9e2a2b768b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.345s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1240.479304] env[62522]: DEBUG oslo_vmware.api [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416404, 'name': CloneVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.487678] env[62522]: DEBUG oslo_vmware.api [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416399, 'name': PowerOnVM_Task, 'duration_secs': 0.486317} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.487921] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1240.488121] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-87cedaf6-1ddb-45c9-b359-4727125614ac tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Updating instance '1c6451e0-2fae-4d2b-86d7-86f9537a6259' progress to 100 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1240.978979] env[62522]: DEBUG oslo_vmware.api [None req-effb176a-b3b6-4285-a4ab-f148275bd685 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416403, 'name': SuspendVM_Task} progress is 62%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.985329] env[62522]: DEBUG oslo_vmware.api [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416404, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.473079] env[62522]: DEBUG oslo_vmware.api [None req-effb176a-b3b6-4285-a4ab-f148275bd685 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416403, 'name': SuspendVM_Task, 'duration_secs': 0.595482} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.476366] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-effb176a-b3b6-4285-a4ab-f148275bd685 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Suspended the VM {{(pid=62522) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1241.476680] env[62522]: DEBUG nova.compute.manager [None req-effb176a-b3b6-4285-a4ab-f148275bd685 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1241.477342] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5f09654-f2ad-4f75-afaa-078562cd73d5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.484185] env[62522]: DEBUG oslo_vmware.api [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416404, 'name': CloneVM_Task} progress is 95%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.982406] env[62522]: DEBUG oslo_vmware.api [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416404, 'name': CloneVM_Task, 'duration_secs': 1.268867} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.983118] env[62522]: INFO nova.virt.vmwareapi.vmops [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Created linked-clone VM from snapshot [ 1241.983721] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9b3f9b1-1f39-4a84-b98f-4db5728103da {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.992714] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Uploading image 72a425d1-613f-40dd-aa4b-ada95eb89bb3 {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1242.019846] env[62522]: DEBUG oslo_vmware.rw_handles [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1242.019846] env[62522]: value = "vm-489865" [ 1242.019846] env[62522]: _type = "VirtualMachine" [ 1242.019846] env[62522]: }. 
{{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1242.020144] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a35eabc1-c213-43b0-845f-21b35c478081 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.027400] env[62522]: DEBUG oslo_vmware.rw_handles [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lease: (returnval){ [ 1242.027400] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52016984-f419-1f42-2b9f-2bbb1b1b3be4" [ 1242.027400] env[62522]: _type = "HttpNfcLease" [ 1242.027400] env[62522]: } obtained for exporting VM: (result){ [ 1242.027400] env[62522]: value = "vm-489865" [ 1242.027400] env[62522]: _type = "VirtualMachine" [ 1242.027400] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1242.027690] env[62522]: DEBUG oslo_vmware.api [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the lease: (returnval){ [ 1242.027690] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52016984-f419-1f42-2b9f-2bbb1b1b3be4" [ 1242.027690] env[62522]: _type = "HttpNfcLease" [ 1242.027690] env[62522]: } to be ready. {{(pid=62522) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1242.038869] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1242.038869] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52016984-f419-1f42-2b9f-2bbb1b1b3be4" [ 1242.038869] env[62522]: _type = "HttpNfcLease" [ 1242.038869] env[62522]: } is initializing. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1242.535085] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1242.535085] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52016984-f419-1f42-2b9f-2bbb1b1b3be4" [ 1242.535085] env[62522]: _type = "HttpNfcLease" [ 1242.535085] env[62522]: } is ready. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1242.535646] env[62522]: DEBUG oslo_vmware.rw_handles [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1242.535646] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52016984-f419-1f42-2b9f-2bbb1b1b3be4" [ 1242.535646] env[62522]: _type = "HttpNfcLease" [ 1242.535646] env[62522]: }. 
{{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1242.536109] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e634b35e-1342-456d-b6fe-a0cc0a07e49a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.543467] env[62522]: DEBUG oslo_vmware.rw_handles [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f6cae8-2e92-d799-2955-57e126e2f538/disk-0.vmdk from lease info. {{(pid=62522) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1242.543641] env[62522]: DEBUG oslo_vmware.rw_handles [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f6cae8-2e92-d799-2955-57e126e2f538/disk-0.vmdk for reading. {{(pid=62522) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1242.647986] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-60f40532-60b8-481b-880d-6607dec6caa6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.860458] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "0ba51973-2ffe-460c-a4e2-c9e2a2b768b1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1242.861359] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "0ba51973-2ffe-460c-a4e2-c9e2a2b768b1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1242.861359] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "0ba51973-2ffe-460c-a4e2-c9e2a2b768b1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1242.861514] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "0ba51973-2ffe-460c-a4e2-c9e2a2b768b1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1242.861633] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 
tempest-DeleteServersTestJSON-552527927-project-member] Lock "0ba51973-2ffe-460c-a4e2-c9e2a2b768b1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1242.865591] env[62522]: INFO nova.compute.manager [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Terminating instance [ 1243.370625] env[62522]: DEBUG nova.compute.manager [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1243.370985] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1243.371983] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3024a81-a65e-4fc1-96ab-68eb4088b962 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.380859] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1243.381205] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8f35826a-9e0f-46db-83c2-7ef4eeeac693 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.443744] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1243.444395] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1243.444654] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Deleting the datastore file [datastore1] 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1243.445197] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31f18665-8444-47f7-9354-55178e1fc6ff {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.456048] env[62522]: DEBUG oslo_vmware.api [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1243.456048] env[62522]: value = "task-2416408" [ 1243.456048] env[62522]: _type = "Task" [ 1243.456048] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.464644] env[62522]: DEBUG oslo_vmware.api [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416408, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.485243] env[62522]: DEBUG nova.network.neutron [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Port 66b8c64e-5981-4cc9-b51a-df5bce03233c binding to destination host cpu-1 is already ACTIVE {{(pid=62522) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1243.485666] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "refresh_cache-1c6451e0-2fae-4d2b-86d7-86f9537a6259" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1243.486038] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquired lock "refresh_cache-1c6451e0-2fae-4d2b-86d7-86f9537a6259" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1243.486389] env[62522]: DEBUG nova.network.neutron [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1243.966335] env[62522]: DEBUG oslo_vmware.api [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416408, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.205753} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.967166] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1243.967915] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1243.967915] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1243.968260] env[62522]: INFO nova.compute.manager [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1243.968634] env[62522]: DEBUG oslo.service.loopingcall [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1243.970047] env[62522]: DEBUG nova.compute.manager [-] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1243.970047] env[62522]: DEBUG nova.network.neutron [-] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1244.398818] env[62522]: DEBUG nova.network.neutron [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Updating instance_info_cache with network_info: [{"id": "66b8c64e-5981-4cc9-b51a-df5bce03233c", "address": "fa:16:3e:ab:2f:6d", "network": {"id": "2a4f6f01-ad28-4f8f-b835-ea86e1791f49", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1577320995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82346c440c3343a0a5c233a48203a13c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": 
"nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66b8c64e-59", "ovs_interfaceid": "66b8c64e-5981-4cc9-b51a-df5bce03233c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1244.431956] env[62522]: DEBUG nova.compute.manager [req-07ba3470-210e-4db7-8c8e-c54ec4f5cba5 req-e60f1ca2-a6f5-4263-aa85-eb9f5046e035 service nova] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Received event network-vif-deleted-99aa2548-94d3-41e4-8cad-1043809dc3df {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1244.432071] env[62522]: INFO nova.compute.manager [req-07ba3470-210e-4db7-8c8e-c54ec4f5cba5 req-e60f1ca2-a6f5-4263-aa85-eb9f5046e035 service nova] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Neutron deleted interface 99aa2548-94d3-41e4-8cad-1043809dc3df; detaching it from the instance and deleting it from the info cache [ 1244.432285] env[62522]: DEBUG nova.network.neutron [req-07ba3470-210e-4db7-8c8e-c54ec4f5cba5 req-e60f1ca2-a6f5-4263-aa85-eb9f5046e035 service nova] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1244.510744] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a56d750-2091-40c4-ad97-ff0b121e3a30 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Volume attach. Driver type: vmdk {{(pid=62522) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1244.512102] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a56d750-2091-40c4-ad97-ff0b121e3a30 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489864', 'volume_id': '792bdc79-fff9-48a5-b954-77d64857a962', 'name': 'volume-792bdc79-fff9-48a5-b954-77d64857a962', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a4cb5c19-9087-4354-9689-a99ae8924dc1', 'attached_at': '', 'detached_at': '', 'volume_id': '792bdc79-fff9-48a5-b954-77d64857a962', 'serial': '792bdc79-fff9-48a5-b954-77d64857a962'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1244.513259] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f42ff50b-0f48-4acf-9aa8-b92928a44bae {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.535426] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80878a9a-e0cc-49ce-acc1-59158f87b4b3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.564504] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a56d750-2091-40c4-ad97-ff0b121e3a30 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] 
Reconfiguring VM instance instance-0000006a to attach disk [datastore1] volume-792bdc79-fff9-48a5-b954-77d64857a962/volume-792bdc79-fff9-48a5-b954-77d64857a962.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1244.565266] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97a72694-6d9d-49a4-afca-fee81f557cac {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.587582] env[62522]: DEBUG oslo_vmware.api [None req-2a56d750-2091-40c4-ad97-ff0b121e3a30 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1244.587582] env[62522]: value = "task-2416409" [ 1244.587582] env[62522]: _type = "Task" [ 1244.587582] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.595590] env[62522]: DEBUG oslo_vmware.api [None req-2a56d750-2091-40c4-ad97-ff0b121e3a30 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416409, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.881280] env[62522]: DEBUG nova.network.neutron [-] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1244.902592] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Releasing lock "refresh_cache-1c6451e0-2fae-4d2b-86d7-86f9537a6259" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1244.935180] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0d1d9d68-beb1-4a89-948f-2e2ea537b427 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.944804] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ca7668-58e3-4a0f-b065-59f853eb379d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.974561] env[62522]: DEBUG nova.compute.manager [req-07ba3470-210e-4db7-8c8e-c54ec4f5cba5 req-e60f1ca2-a6f5-4263-aa85-eb9f5046e035 service nova] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Detach interface failed, port_id=99aa2548-94d3-41e4-8cad-1043809dc3df, reason: Instance 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1245.098148] env[62522]: DEBUG oslo_vmware.api [None req-2a56d750-2091-40c4-ad97-ff0b121e3a30 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416409, 'name': ReconfigVM_Task, 'duration_secs': 0.39411} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.098148] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a56d750-2091-40c4-ad97-ff0b121e3a30 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Reconfigured VM instance instance-0000006a to attach disk [datastore1] volume-792bdc79-fff9-48a5-b954-77d64857a962/volume-792bdc79-fff9-48a5-b954-77d64857a962.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1245.102573] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59eee136-30b9-442a-96e3-0a98e6e962af {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.117081] env[62522]: DEBUG oslo_vmware.api [None req-2a56d750-2091-40c4-ad97-ff0b121e3a30 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1245.117081] env[62522]: value = "task-2416410" [ 1245.117081] env[62522]: _type = "Task" [ 1245.117081] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.125169] env[62522]: DEBUG oslo_vmware.api [None req-2a56d750-2091-40c4-ad97-ff0b121e3a30 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416410, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.384897] env[62522]: INFO nova.compute.manager [-] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Took 1.42 seconds to deallocate network for instance. [ 1245.406592] env[62522]: DEBUG nova.compute.manager [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62522) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1245.629870] env[62522]: DEBUG oslo_vmware.api [None req-2a56d750-2091-40c4-ad97-ff0b121e3a30 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416410, 'name': ReconfigVM_Task, 'duration_secs': 0.173345} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.629870] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a56d750-2091-40c4-ad97-ff0b121e3a30 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489864', 'volume_id': '792bdc79-fff9-48a5-b954-77d64857a962', 'name': 'volume-792bdc79-fff9-48a5-b954-77d64857a962', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a4cb5c19-9087-4354-9689-a99ae8924dc1', 'attached_at': '', 'detached_at': '', 'volume_id': '792bdc79-fff9-48a5-b954-77d64857a962', 'serial': '792bdc79-fff9-48a5-b954-77d64857a962'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1245.892568] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1245.892842] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1245.893465] env[62522]: DEBUG nova.objects.instance [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lazy-loading 'resources' on Instance uuid 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1246.502588] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1246.507170] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92048f70-c845-4fad-9fa4-19ed20e89556 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.514745] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f748b2f3-d933-425f-89e2-4e3e678c287f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.544677] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51c29972-af1c-412b-b6e8-235d9bb88862 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.552641] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8464782e-aa6f-4fb5-b28a-aa2b0f0c9a55 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.565972] env[62522]: DEBUG nova.compute.provider_tree [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1246.663081] env[62522]: DEBUG nova.objects.instance [None req-2a56d750-2091-40c4-ad97-ff0b121e3a30 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lazy-loading 'flavor' on Instance uuid a4cb5c19-9087-4354-9689-a99ae8924dc1 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1247.069171] env[62522]: DEBUG nova.scheduler.client.report [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1247.169970] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2a56d750-2091-40c4-ad97-ff0b121e3a30 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "a4cb5c19-9087-4354-9689-a99ae8924dc1" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.279s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1247.575795] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.683s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1247.578361] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 1.076s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1247.599283] env[62522]: INFO nova.scheduler.client.report [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Deleted allocations for instance 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1 [ 1248.083535] env[62522]: DEBUG nova.objects.instance [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lazy-loading 'migration_context' on Instance uuid 1c6451e0-2fae-4d2b-86d7-86f9537a6259 {{(pid=62522) 
obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1248.107669] env[62522]: DEBUG oslo_concurrency.lockutils [None req-a8cf5b8a-852e-4b8c-95aa-684c4e51e759 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "0ba51973-2ffe-460c-a4e2-c9e2a2b768b1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.246s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1248.381668] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "f74196c1-b00f-4f42-84dc-17b21fa30374" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1248.381931] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "f74196c1-b00f-4f42-84dc-17b21fa30374" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1248.711615] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1abee94a-27c4-494d-88e8-6a079398c3c1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.720303] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ebb0fa-b602-4ba3-8251-2b523cf8b29b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.751070] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03359dc3-b3bf-4f75-b56f-89de914c2cca {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.758502] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d45be9f5-f756-49da-bb20-bacc8e5f4e4b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.773045] env[62522]: DEBUG nova.compute.provider_tree [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1248.789537] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "961df2ff-bd02-45af-afb8-14a99cfea1de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1248.789762] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcb01176-22df-495b-bf66-759e020d839e 
tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "961df2ff-bd02-45af-afb8-14a99cfea1de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1248.884282] env[62522]: DEBUG nova.compute.manager [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1249.276139] env[62522]: DEBUG nova.scheduler.client.report [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1249.291936] env[62522]: DEBUG nova.compute.manager [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1249.399665] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df2fe5b6-7b73-46fe-83b4-eea24f61050b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "27f4b976-7dff-49b0-9b00-7515cb976e72" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1249.399917] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df2fe5b6-7b73-46fe-83b4-eea24f61050b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "27f4b976-7dff-49b0-9b00-7515cb976e72" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1249.400116] env[62522]: DEBUG nova.compute.manager [None req-df2fe5b6-7b73-46fe-83b4-eea24f61050b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1249.400983] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aec7a14c-80ec-4c28-8aa7-bc04aa184931 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.405296] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 
tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1249.407496] env[62522]: DEBUG nova.compute.manager [None req-df2fe5b6-7b73-46fe-83b4-eea24f61050b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62522) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1249.408044] env[62522]: DEBUG nova.objects.instance [None req-df2fe5b6-7b73-46fe-83b4-eea24f61050b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lazy-loading 'flavor' on Instance uuid 27f4b976-7dff-49b0-9b00-7515cb976e72 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1249.815549] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1250.286166] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.708s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1250.292336] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.887s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1250.294014] env[62522]: INFO nova.compute.claims [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1250.366352] env[62522]: DEBUG oslo_vmware.rw_handles [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f6cae8-2e92-d799-2955-57e126e2f538/disk-0.vmdk. 
{{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1250.367308] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-573792cd-6f65-4fd4-b430-61e6f6ca8e40 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.373967] env[62522]: DEBUG oslo_vmware.rw_handles [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f6cae8-2e92-d799-2955-57e126e2f538/disk-0.vmdk is in state: ready. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1250.374151] env[62522]: ERROR oslo_vmware.rw_handles [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f6cae8-2e92-d799-2955-57e126e2f538/disk-0.vmdk due to incomplete transfer. [ 1250.374361] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-8d50bf38-bda1-43d6-ae29-98ac38df7e14 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.381387] env[62522]: DEBUG oslo_vmware.rw_handles [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f6cae8-2e92-d799-2955-57e126e2f538/disk-0.vmdk. {{(pid=62522) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1250.381579] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Uploaded image 72a425d1-613f-40dd-aa4b-ada95eb89bb3 to the Glance image server {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1250.383656] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Destroying the VM {{(pid=62522) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1250.383913] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0013abe0-972e-4ba7-b5d1-f4e7d01d4a3a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.391236] env[62522]: DEBUG oslo_vmware.api [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1250.391236] env[62522]: value = "task-2416411" [ 1250.391236] env[62522]: _type = "Task" [ 1250.391236] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.398385] env[62522]: DEBUG oslo_vmware.api [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416411, 'name': Destroy_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.414443] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-df2fe5b6-7b73-46fe-83b4-eea24f61050b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1250.414759] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-449e82fe-2a22-4988-980b-aaa31b249016 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.420991] env[62522]: DEBUG oslo_vmware.api [None req-df2fe5b6-7b73-46fe-83b4-eea24f61050b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1250.420991] env[62522]: value = "task-2416412" [ 1250.420991] env[62522]: _type = "Task" [ 1250.420991] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.431026] env[62522]: DEBUG oslo_vmware.api [None req-df2fe5b6-7b73-46fe-83b4-eea24f61050b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416412, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.900266] env[62522]: DEBUG oslo_vmware.api [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416411, 'name': Destroy_Task, 'duration_secs': 0.327971} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.900587] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Destroyed the VM [ 1250.900777] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Deleting Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1250.901027] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-393d40c1-781d-4db2-8baa-d9b0e691f35c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.907829] env[62522]: DEBUG oslo_vmware.api [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1250.907829] env[62522]: value = "task-2416413" [ 1250.907829] env[62522]: _type = "Task" [ 1250.907829] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.914615] env[62522]: DEBUG oslo_vmware.api [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416413, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.929120] env[62522]: DEBUG oslo_vmware.api [None req-df2fe5b6-7b73-46fe-83b4-eea24f61050b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416412, 'name': PowerOffVM_Task, 'duration_secs': 0.244696} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.929458] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-df2fe5b6-7b73-46fe-83b4-eea24f61050b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1250.929696] env[62522]: DEBUG nova.compute.manager [None req-df2fe5b6-7b73-46fe-83b4-eea24f61050b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1250.930493] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e87f046-9ddc-4ae8-82a3-e986c4bf33ed {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.418104] env[62522]: DEBUG oslo_vmware.api [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416413, 'name': RemoveSnapshot_Task, 'duration_secs': 0.386759} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.420328] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Deleted Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1251.420612] env[62522]: DEBUG nova.compute.manager [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1251.421516] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f86bd4-f61b-40ed-9573-9db69b0bb683 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.429868] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de3da96-49a4-459a-871a-bda80ef517c2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.437227] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-254da9d2-4ae0-4ab1-a698-b525c4f0df29 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.442351] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df2fe5b6-7b73-46fe-83b4-eea24f61050b tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "27f4b976-7dff-49b0-9b00-7515cb976e72" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.042s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1251.470339] env[62522]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b6223a3-b989-4429-81f4-a4c16311d996 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.478050] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a837be-953e-409f-a7af-5ddff88dab8f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.491662] env[62522]: DEBUG nova.compute.provider_tree [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1251.784728] env[62522]: DEBUG nova.objects.instance [None req-2df57b18-fb4c-4b59-b4ad-74caed22b093 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lazy-loading 'flavor' on Instance uuid 27f4b976-7dff-49b0-9b00-7515cb976e72 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1251.829431] env[62522]: INFO nova.compute.manager [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Swapping old allocation on dict_keys(['c7fa38b2-245d-4337-a012-22c1a01c0a72']) held by migration ae46d283-bdf8-472c-bbc7-23e6b987e02b for instance [ 1251.851804] env[62522]: DEBUG nova.scheduler.client.report [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Overwriting current allocation {'allocations': {'c7fa38b2-245d-4337-a012-22c1a01c0a72': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 158}}, 'project_id': '82346c440c3343a0a5c233a48203a13c', 'user_id': 'c7a901dd2575462f9369f3d8819fb86d', 'consumer_generation': 1} on consumer 1c6451e0-2fae-4d2b-86d7-86f9537a6259 {{(pid=62522) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1251.935877] env[62522]: INFO nova.compute.manager [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Shelve offloading [ 1251.949112] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "refresh_cache-1c6451e0-2fae-4d2b-86d7-86f9537a6259" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1251.949309] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquired lock "refresh_cache-1c6451e0-2fae-4d2b-86d7-86f9537a6259" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1251.949487] env[62522]: DEBUG nova.network.neutron [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Building network info 
cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1251.995056] env[62522]: DEBUG nova.scheduler.client.report [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1252.289784] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2df57b18-fb4c-4b59-b4ad-74caed22b093 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1252.289946] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2df57b18-fb4c-4b59-b4ad-74caed22b093 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1252.290078] env[62522]: DEBUG nova.network.neutron [None req-2df57b18-fb4c-4b59-b4ad-74caed22b093 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1252.290260] env[62522]: DEBUG nova.objects.instance [None req-2df57b18-fb4c-4b59-b4ad-74caed22b093 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lazy-loading 'info_cache' on Instance uuid 27f4b976-7dff-49b0-9b00-7515cb976e72 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1252.439743] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1252.440066] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52302289-c6d9-45fc-bb3c-b371956e9958 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.448758] env[62522]: DEBUG oslo_vmware.api [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1252.448758] env[62522]: value = "task-2416414" [ 1252.448758] env[62522]: _type = "Task" [ 1252.448758] env[62522]: } to complete. 
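The report-client entries above repeat the same VCPU/MEMORY_MB/DISK_GB inventory and conclude that nothing changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72. The sketch below shows the kind of compare-before-push check that produces such messages; it is a generic illustration with made-up helper names, not Nova's scheduler report client.

# Illustrative sketch only: skip the Placement update when the freshly
# computed inventory matches what was last reported for the provider.
CACHED_INVENTORY = {
    "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
             "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                  "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 149,
                "step_size": 1, "allocation_ratio": 1.0},
}

def maybe_update_inventory(provider_uuid, new_inventory, push):
    """Call push(provider_uuid, new_inventory) only when something changed."""
    if new_inventory == CACHED_INVENTORY:
        print("Inventory has not changed for provider %s" % provider_uuid)
        return False
    push(provider_uuid, new_inventory)
    CACHED_INVENTORY.clear()
    CACHED_INVENTORY.update(new_inventory)
    return True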
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.459715] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] VM already powered off {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1252.459948] env[62522]: DEBUG nova.compute.manager [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1252.460739] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d2f3814-5825-40da-84bf-12d7a0d250df {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.466590] env[62522]: DEBUG oslo_concurrency.lockutils [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "refresh_cache-cb7a19f1-6093-47ee-bbbc-a75dd5423f32" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1252.466773] env[62522]: DEBUG oslo_concurrency.lockutils [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquired lock "refresh_cache-cb7a19f1-6093-47ee-bbbc-a75dd5423f32" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1252.466920] env[62522]: DEBUG nova.network.neutron [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1252.500796] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.208s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1252.501331] env[62522]: DEBUG nova.compute.manager [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Start building networks asynchronously for instance. 
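The lockutils entries throughout this stretch ("Acquiring lock", "acquired ... waited 0.000s", "\"released\" ... held 2.208s") time both the wait for and the hold of each named lock. Below is a small generic wrapper that produces the same figures with threading primitives; it is not oslo.concurrency's implementation.

# Illustrative sketch only: a named-lock wrapper that reports how long a
# caller waited to acquire the lock and how long it was held, in the same
# spirit as the lockutils "waited N s" / "held N s" entries above.
import threading
import time
from contextlib import contextmanager

_LOCKS = {}
_REGISTRY_LOCK = threading.Lock()

@contextmanager
def timed_lock(name, owner):
    with _REGISTRY_LOCK:
        lock = _LOCKS.setdefault(name, threading.Lock())
    print('Acquiring lock "%s" by "%s"' % (name, owner))
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    print('Lock "%s" acquired by "%s" :: waited %.3fs'
          % (name, owner, acquired - start))
    try:
        yield
    finally:
        lock.release()
        print('Lock "%s" "released" by "%s" :: held %.3fs'
              % (name, owner, time.monotonic() - acquired))

# Example:
# with timed_lock("compute_resources", "instance_claim"):
#     pass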
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1252.504075] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.689s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1252.505475] env[62522]: INFO nova.compute.claims [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1252.696108] env[62522]: DEBUG nova.network.neutron [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Updating instance_info_cache with network_info: [{"id": "66b8c64e-5981-4cc9-b51a-df5bce03233c", "address": "fa:16:3e:ab:2f:6d", "network": {"id": "2a4f6f01-ad28-4f8f-b835-ea86e1791f49", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1577320995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82346c440c3343a0a5c233a48203a13c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66b8c64e-59", "ovs_interfaceid": "66b8c64e-5981-4cc9-b51a-df5bce03233c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1252.793912] env[62522]: DEBUG nova.objects.base [None req-2df57b18-fb4c-4b59-b4ad-74caed22b093 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Object Instance<27f4b976-7dff-49b0-9b00-7515cb976e72> lazy-loaded attributes: flavor,info_cache {{(pid=62522) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1253.010907] env[62522]: DEBUG nova.compute.utils [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1253.014188] env[62522]: DEBUG nova.compute.manager [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Allocating IP information in the background. 
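"Start building networks asynchronously" and "Allocating IP information in the background" describe port allocation running concurrently with the rest of the build, which is joined before the guest is spawned. The sketch below shows that pattern with concurrent.futures; Nova itself uses eventlet, and the helper functions here are placeholders rather than real Nova code.

# Illustrative sketch only: start the (slow) port allocation in the
# background, keep building block devices in the foreground, then wait
# for the network result before continuing.
from concurrent.futures import ThreadPoolExecutor
import time

def allocate_network(instance_id):
    time.sleep(0.2)                      # stand-in for Neutron port creation
    return {"port_id": "14494be3-972e-4dae-a55d-bd5b458491d9"}

def build_block_device_mappings(instance_id):
    return [{"device_name": "/dev/sda", "boot_index": 0}]

def build_instance(instance_id):
    with ThreadPoolExecutor(max_workers=1) as pool:
        net_future = pool.submit(allocate_network, instance_id)   # background
        bdms = build_block_device_mappings(instance_id)           # foreground
        network_info = net_future.result()                        # join before spawn
    return bdms, network_info

print(build_instance("f74196c1-b00f-4f42-84dc-17b21fa30374"))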
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1253.014410] env[62522]: DEBUG nova.network.neutron [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1253.120937] env[62522]: DEBUG nova.policy [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'edec975faaef4f2ba31aa0de30590522', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fa792663b4ac41b7bf4c5e4b290f9b86', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1253.198315] env[62522]: DEBUG oslo_concurrency.lockutils [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Releasing lock "refresh_cache-1c6451e0-2fae-4d2b-86d7-86f9537a6259" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1253.199282] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ab05f76-05e0-4e3b-88dd-2853e3876bb6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.206453] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6c33110-4072-4487-a447-e9b4cbd3e01e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.337800] env[62522]: DEBUG nova.network.neutron [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Updating instance_info_cache with network_info: [{"id": "33665d0f-b7dd-4d62-86d5-8ccb8f178e97", "address": "fa:16:3e:1d:d3:51", "network": {"id": "949f3536-8a7e-4edf-b6cc-6a264fe5fe83", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1891232839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f93394feaa4f4b61a5d3d670d32ec599", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33665d0f-b7", "ovs_interfaceid": "33665d0f-b7dd-4d62-86d5-8ccb8f178e97", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1253.499546] env[62522]: DEBUG nova.network.neutron [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Successfully created port: 14494be3-972e-4dae-a55d-bd5b458491d9 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1253.504851] env[62522]: DEBUG nova.network.neutron [None req-2df57b18-fb4c-4b59-b4ad-74caed22b093 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Updating instance_info_cache with network_info: [{"id": "cf4b3978-2fa2-4182-9422-abf29faafcf6", "address": "fa:16:3e:74:26:e7", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf4b3978-2f", "ovs_interfaceid": "cf4b3978-2fa2-4182-9422-abf29faafcf6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1253.514831] env[62522]: DEBUG nova.compute.manager [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1253.649278] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9afabb7-5262-4282-9c6b-3fb92818e7ff {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.657120] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c76fb1-39a8-4562-9bad-6d1f46bb7bef {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.689475] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22187f09-c38f-4bc8-b392-830edaeb93c2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.698361] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4fafbb8-f24d-484b-a0b5-655655e56c99 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.711815] env[62522]: DEBUG nova.compute.provider_tree [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1253.840560] env[62522]: DEBUG oslo_concurrency.lockutils [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Releasing lock "refresh_cache-cb7a19f1-6093-47ee-bbbc-a75dd5423f32" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1254.007991] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2df57b18-fb4c-4b59-b4ad-74caed22b093 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Releasing lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1254.136774] env[62522]: DEBUG nova.compute.manager [req-0eaf60ea-213b-4893-8aec-d36ea279c46a req-e27e99d2-8181-4d98-a7b5-9f76bdbd0928 service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Received event network-vif-unplugged-33665d0f-b7dd-4d62-86d5-8ccb8f178e97 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1254.137054] env[62522]: DEBUG oslo_concurrency.lockutils [req-0eaf60ea-213b-4893-8aec-d36ea279c46a req-e27e99d2-8181-4d98-a7b5-9f76bdbd0928 service nova] Acquiring lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1254.137225] env[62522]: DEBUG oslo_concurrency.lockutils [req-0eaf60ea-213b-4893-8aec-d36ea279c46a req-e27e99d2-8181-4d98-a7b5-9f76bdbd0928 service nova] Lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1254.137438] env[62522]: DEBUG oslo_concurrency.lockutils 
[req-0eaf60ea-213b-4893-8aec-d36ea279c46a req-e27e99d2-8181-4d98-a7b5-9f76bdbd0928 service nova] Lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1254.137568] env[62522]: DEBUG nova.compute.manager [req-0eaf60ea-213b-4893-8aec-d36ea279c46a req-e27e99d2-8181-4d98-a7b5-9f76bdbd0928 service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] No waiting events found dispatching network-vif-unplugged-33665d0f-b7dd-4d62-86d5-8ccb8f178e97 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1254.137736] env[62522]: WARNING nova.compute.manager [req-0eaf60ea-213b-4893-8aec-d36ea279c46a req-e27e99d2-8181-4d98-a7b5-9f76bdbd0928 service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Received unexpected event network-vif-unplugged-33665d0f-b7dd-4d62-86d5-8ccb8f178e97 for instance with vm_state shelved and task_state shelving_offloading. [ 1254.193818] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1254.194791] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a68155-8ec4-4fb4-97ff-c810dfb8caa8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.202657] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1254.202942] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-43a3db47-a61a-45c4-818a-0ef44bc136c7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.214845] env[62522]: DEBUG nova.scheduler.client.report [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1254.301025] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1254.301377] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task 
with opID=oslo.vmware-467aea4c-0b35-447e-ad67-d6425e79254d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.309046] env[62522]: DEBUG oslo_vmware.api [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1254.309046] env[62522]: value = "task-2416416" [ 1254.309046] env[62522]: _type = "Task" [ 1254.309046] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.318553] env[62522]: DEBUG oslo_vmware.api [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416416, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.343660] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1254.343925] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1254.344149] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Deleting the datastore file [datastore2] cb7a19f1-6093-47ee-bbbc-a75dd5423f32 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1254.344420] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c4c3baf-7e54-461d-a1c1-aff3b707a379 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.350832] env[62522]: DEBUG oslo_vmware.api [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1254.350832] env[62522]: value = "task-2416417" [ 1254.350832] env[62522]: _type = "Task" [ 1254.350832] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.359468] env[62522]: DEBUG oslo_vmware.api [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416417, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.526494] env[62522]: DEBUG nova.compute.manager [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1254.555209] env[62522]: DEBUG nova.virt.hardware [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1254.555537] env[62522]: DEBUG nova.virt.hardware [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1254.555751] env[62522]: DEBUG nova.virt.hardware [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1254.555959] env[62522]: DEBUG nova.virt.hardware [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1254.556139] env[62522]: DEBUG nova.virt.hardware [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1254.556336] env[62522]: DEBUG nova.virt.hardware [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1254.556602] env[62522]: DEBUG nova.virt.hardware [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1254.556807] env[62522]: DEBUG nova.virt.hardware [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1254.557048] env[62522]: DEBUG nova.virt.hardware [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1254.557257] env[62522]: DEBUG nova.virt.hardware [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1254.557480] env[62522]: DEBUG nova.virt.hardware [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1254.558712] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b1bbe7-33f5-4bd2-8b22-f81a80a14e17 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.566989] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a6580b9-dd00-4b53-a542-a445a190489c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.721039] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.217s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1254.721366] env[62522]: DEBUG nova.compute.manager [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1254.821910] env[62522]: DEBUG oslo_vmware.api [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416416, 'name': PowerOffVM_Task, 'duration_secs': 0.250896} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.821910] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1254.822886] env[62522]: DEBUG nova.virt.hardware [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1254.822886] env[62522]: DEBUG nova.virt.hardware [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1254.822886] env[62522]: DEBUG nova.virt.hardware [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1254.823116] env[62522]: DEBUG nova.virt.hardware [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1254.823288] env[62522]: DEBUG nova.virt.hardware [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1254.823336] env[62522]: DEBUG nova.virt.hardware [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1254.823505] env[62522]: DEBUG nova.virt.hardware [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1254.823667] env[62522]: DEBUG nova.virt.hardware [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 
tempest-ServerActionsTestOtherB-610600767-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1254.823847] env[62522]: DEBUG nova.virt.hardware [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1254.824081] env[62522]: DEBUG nova.virt.hardware [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1254.824299] env[62522]: DEBUG nova.virt.hardware [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1254.829394] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a00fed2c-90f7-4cab-b9a2-1623a488ffd9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.844676] env[62522]: DEBUG oslo_vmware.api [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1254.844676] env[62522]: value = "task-2416418" [ 1254.844676] env[62522]: _type = "Task" [ 1254.844676] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.852284] env[62522]: DEBUG oslo_vmware.api [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416418, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.859515] env[62522]: DEBUG oslo_vmware.api [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416417, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147716} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.859765] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1254.859955] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1254.860144] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1254.885426] env[62522]: INFO nova.scheduler.client.report [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Deleted allocations for instance cb7a19f1-6093-47ee-bbbc-a75dd5423f32 [ 1255.012922] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2df57b18-fb4c-4b59-b4ad-74caed22b093 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1255.013275] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d8e545e6-ea27-4e9a-a664-efc494b43ea6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.020527] env[62522]: DEBUG oslo_vmware.api [None req-2df57b18-fb4c-4b59-b4ad-74caed22b093 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1255.020527] env[62522]: value = "task-2416419" [ 1255.020527] env[62522]: _type = "Task" [ 1255.020527] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.027658] env[62522]: DEBUG oslo_vmware.api [None req-2df57b18-fb4c-4b59-b4ad-74caed22b093 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416419, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.094060] env[62522]: DEBUG nova.compute.manager [req-2a206d91-a8d4-44e7-bbcb-379441ad8532 req-911c1ff9-8479-41c9-b43c-0653efc3dbff service nova] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Received event network-vif-plugged-14494be3-972e-4dae-a55d-bd5b458491d9 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1255.094303] env[62522]: DEBUG oslo_concurrency.lockutils [req-2a206d91-a8d4-44e7-bbcb-379441ad8532 req-911c1ff9-8479-41c9-b43c-0653efc3dbff service nova] Acquiring lock "f74196c1-b00f-4f42-84dc-17b21fa30374-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1255.094513] env[62522]: DEBUG oslo_concurrency.lockutils [req-2a206d91-a8d4-44e7-bbcb-379441ad8532 req-911c1ff9-8479-41c9-b43c-0653efc3dbff service nova] Lock "f74196c1-b00f-4f42-84dc-17b21fa30374-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1255.094678] env[62522]: DEBUG oslo_concurrency.lockutils [req-2a206d91-a8d4-44e7-bbcb-379441ad8532 req-911c1ff9-8479-41c9-b43c-0653efc3dbff service nova] Lock "f74196c1-b00f-4f42-84dc-17b21fa30374-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1255.094864] env[62522]: DEBUG nova.compute.manager [req-2a206d91-a8d4-44e7-bbcb-379441ad8532 req-911c1ff9-8479-41c9-b43c-0653efc3dbff service nova] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] No waiting events found dispatching network-vif-plugged-14494be3-972e-4dae-a55d-bd5b458491d9 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1255.095091] env[62522]: WARNING nova.compute.manager [req-2a206d91-a8d4-44e7-bbcb-379441ad8532 req-911c1ff9-8479-41c9-b43c-0653efc3dbff service nova] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Received unexpected event network-vif-plugged-14494be3-972e-4dae-a55d-bd5b458491d9 for instance with vm_state building and task_state spawning. [ 1255.185576] env[62522]: DEBUG nova.network.neutron [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Successfully updated port: 14494be3-972e-4dae-a55d-bd5b458491d9 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1255.226060] env[62522]: DEBUG nova.compute.utils [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1255.227758] env[62522]: DEBUG nova.compute.manager [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Allocating IP information in the background.
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1255.227965] env[62522]: DEBUG nova.network.neutron [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1255.283422] env[62522]: DEBUG nova.policy [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f26eeb125397426baca60d80d635c4b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a06421250694a98b13ff34ad816dc75', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1255.355373] env[62522]: DEBUG oslo_vmware.api [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416418, 'name': ReconfigVM_Task, 'duration_secs': 0.146264} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.356259] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860228d3-1571-403a-9299-fdc8e047d477 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.379672] env[62522]: DEBUG nova.virt.hardware [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1255.379952] env[62522]: DEBUG nova.virt.hardware [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1255.380133] env[62522]: DEBUG nova.virt.hardware [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1255.380320] env[62522]: DEBUG nova.virt.hardware [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 
tempest-ServerActionsTestOtherB-610600767-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1255.380465] env[62522]: DEBUG nova.virt.hardware [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1255.381123] env[62522]: DEBUG nova.virt.hardware [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1255.381394] env[62522]: DEBUG nova.virt.hardware [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1255.381563] env[62522]: DEBUG nova.virt.hardware [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1255.381734] env[62522]: DEBUG nova.virt.hardware [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1255.381936] env[62522]: DEBUG nova.virt.hardware [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1255.382138] env[62522]: DEBUG nova.virt.hardware [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1255.382943] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3226efad-066c-467a-ae64-eecb44bd8bca {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.389779] env[62522]: DEBUG oslo_concurrency.lockutils [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1255.390028] env[62522]: DEBUG oslo_concurrency.lockutils [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1255.390484] env[62522]: DEBUG nova.objects.instance [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lazy-loading 'resources' on Instance uuid cb7a19f1-6093-47ee-bbbc-a75dd5423f32 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1255.392587] env[62522]: DEBUG oslo_vmware.api [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1255.392587] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52defc38-4b9d-f7c3-016a-fd9bdb188bf5" [ 1255.392587] env[62522]: _type = "Task" [ 1255.392587] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.402701] env[62522]: DEBUG oslo_vmware.api [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52defc38-4b9d-f7c3-016a-fd9bdb188bf5, 'name': SearchDatastore_Task, 'duration_secs': 0.008747} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.408762] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Reconfiguring VM instance instance-00000066 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1255.409608] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-69a24efa-dd6d-4a1b-8d58-836c2c544e58 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.434700] env[62522]: DEBUG oslo_vmware.api [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1255.434700] env[62522]: value = "task-2416420" [ 1255.434700] env[62522]: _type = "Task" [ 1255.434700] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.443085] env[62522]: DEBUG oslo_vmware.api [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416420, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.532381] env[62522]: DEBUG oslo_vmware.api [None req-2df57b18-fb4c-4b59-b4ad-74caed22b093 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416419, 'name': PowerOnVM_Task, 'duration_secs': 0.453103} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.532655] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2df57b18-fb4c-4b59-b4ad-74caed22b093 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1255.532853] env[62522]: DEBUG nova.compute.manager [None req-2df57b18-fb4c-4b59-b4ad-74caed22b093 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1255.533685] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aaf1f15-c352-4d00-a30b-4fcd914d3213 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.625906] env[62522]: DEBUG nova.network.neutron [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Successfully created port: a15f47bb-6d26-4faf-91e1-6ce27453f7bf {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1255.688578] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "refresh_cache-f74196c1-b00f-4f42-84dc-17b21fa30374" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1255.688860] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquired lock "refresh_cache-f74196c1-b00f-4f42-84dc-17b21fa30374" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1255.689032] env[62522]: DEBUG nova.network.neutron [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1255.730791] env[62522]: DEBUG nova.compute.manager [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1255.894119] env[62522]: DEBUG nova.objects.instance [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lazy-loading 'numa_topology' on Instance uuid cb7a19f1-6093-47ee-bbbc-a75dd5423f32 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1255.948124] env[62522]: DEBUG oslo_vmware.api [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416420, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.165964] env[62522]: DEBUG nova.compute.manager [req-416005e1-5a67-4ae4-aed2-1c25b462459e req-0c014812-e379-499e-b706-bf3acfba46f6 service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Received event network-changed-33665d0f-b7dd-4d62-86d5-8ccb8f178e97 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1256.166363] env[62522]: DEBUG nova.compute.manager [req-416005e1-5a67-4ae4-aed2-1c25b462459e req-0c014812-e379-499e-b706-bf3acfba46f6 service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Refreshing instance network info cache due to event network-changed-33665d0f-b7dd-4d62-86d5-8ccb8f178e97. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1256.166706] env[62522]: DEBUG oslo_concurrency.lockutils [req-416005e1-5a67-4ae4-aed2-1c25b462459e req-0c014812-e379-499e-b706-bf3acfba46f6 service nova] Acquiring lock "refresh_cache-cb7a19f1-6093-47ee-bbbc-a75dd5423f32" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1256.168543] env[62522]: DEBUG oslo_concurrency.lockutils [req-416005e1-5a67-4ae4-aed2-1c25b462459e req-0c014812-e379-499e-b706-bf3acfba46f6 service nova] Acquired lock "refresh_cache-cb7a19f1-6093-47ee-bbbc-a75dd5423f32" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1256.168543] env[62522]: DEBUG nova.network.neutron [req-416005e1-5a67-4ae4-aed2-1c25b462459e req-0c014812-e379-499e-b706-bf3acfba46f6 service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Refreshing network info cache for port 33665d0f-b7dd-4d62-86d5-8ccb8f178e97 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1256.222879] env[62522]: DEBUG nova.network.neutron [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1256.361821] env[62522]: DEBUG nova.network.neutron [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Updating instance_info_cache with network_info: [{"id": "14494be3-972e-4dae-a55d-bd5b458491d9", "address": "fa:16:3e:de:c2:a3", "network": {"id": "2c9c537f-91b6-4217-8eaf-dc187f4ce7d5", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1154766161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fa792663b4ac41b7bf4c5e4b290f9b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14494be3-97", "ovs_interfaceid": "14494be3-972e-4dae-a55d-bd5b458491d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1256.396865] env[62522]: DEBUG nova.objects.base [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62522) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1256.446642] env[62522]: DEBUG oslo_vmware.api [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416420, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.489364] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0603a5bb-e803-4211-9f89-591518b02f77 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.496527] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad8ffa59-13e3-43cf-9f75-7a195e29996f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.528033] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd123e8-ad08-4da7-a0d5-084309ab32b4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.534946] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a6e01c-bc15-4af4-bd6c-3662242f6072 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.547990] env[62522]: DEBUG nova.compute.provider_tree [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1256.741541] env[62522]: DEBUG nova.compute.manager [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1256.768909] env[62522]: DEBUG nova.virt.hardware [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1256.769193] env[62522]: DEBUG nova.virt.hardware [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1256.769329] env[62522]: DEBUG nova.virt.hardware [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1256.769507] env[62522]: DEBUG nova.virt.hardware [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1256.769657] env[62522]: DEBUG nova.virt.hardware [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1256.769806] env[62522]: DEBUG nova.virt.hardware [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1256.770021] env[62522]: DEBUG nova.virt.hardware [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1256.770318] env[62522]: DEBUG nova.virt.hardware [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1256.770484] env[62522]: DEBUG nova.virt.hardware [None 
req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1256.770729] env[62522]: DEBUG nova.virt.hardware [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1256.770985] env[62522]: DEBUG nova.virt.hardware [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1256.771944] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-555e41e7-b6eb-4561-a8ae-2731aeac667e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.780330] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2999933-f2d0-4a1c-aa1e-f6f25c6e7ab1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.864330] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Releasing lock "refresh_cache-f74196c1-b00f-4f42-84dc-17b21fa30374" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1256.864654] env[62522]: DEBUG nova.compute.manager [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Instance network_info: |[{"id": "14494be3-972e-4dae-a55d-bd5b458491d9", "address": "fa:16:3e:de:c2:a3", "network": {"id": "2c9c537f-91b6-4217-8eaf-dc187f4ce7d5", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1154766161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fa792663b4ac41b7bf4c5e4b290f9b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14494be3-97", "ovs_interfaceid": "14494be3-972e-4dae-a55d-bd5b458491d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1256.865126] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 
tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:c2:a3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '14494be3-972e-4dae-a55d-bd5b458491d9', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1256.872892] env[62522]: DEBUG oslo.service.loopingcall [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1256.877017] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1256.877017] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-38c37b1c-e629-47e6-b781-0104944369a9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.894869] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1256.894869] env[62522]: value = "task-2416421" [ 1256.894869] env[62522]: _type = "Task" [ 1256.894869] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.902051] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416421, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.922590] env[62522]: DEBUG nova.network.neutron [req-416005e1-5a67-4ae4-aed2-1c25b462459e req-0c014812-e379-499e-b706-bf3acfba46f6 service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Updated VIF entry in instance network info cache for port 33665d0f-b7dd-4d62-86d5-8ccb8f178e97. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1256.922970] env[62522]: DEBUG nova.network.neutron [req-416005e1-5a67-4ae4-aed2-1c25b462459e req-0c014812-e379-499e-b706-bf3acfba46f6 service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Updating instance_info_cache with network_info: [{"id": "33665d0f-b7dd-4d62-86d5-8ccb8f178e97", "address": "fa:16:3e:1d:d3:51", "network": {"id": "949f3536-8a7e-4edf-b6cc-6a264fe5fe83", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1891232839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f93394feaa4f4b61a5d3d670d32ec599", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap33665d0f-b7", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1256.946937] env[62522]: DEBUG oslo_vmware.api [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416420, 'name': ReconfigVM_Task, 'duration_secs': 1.24055} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.947357] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Reconfigured VM instance instance-00000066 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1256.948310] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64aa7dc9-6e84-44f4-82d5-5a19bcde7166 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.975443] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 1c6451e0-2fae-4d2b-86d7-86f9537a6259/1c6451e0-2fae-4d2b-86d7-86f9537a6259.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1256.975769] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dbf31660-d751-4876-a265-243588d4cc45 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.996061] env[62522]: DEBUG oslo_vmware.api [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ 
[ 1256.996061] env[62522]: value = "task-2416422" [ 1256.996061] env[62522]: _type = "Task" [ 1256.996061] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.004094] env[62522]: DEBUG oslo_vmware.api [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416422, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.051759] env[62522]: DEBUG nova.scheduler.client.report [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1257.126337] env[62522]: DEBUG nova.compute.manager [req-753c530f-5cd2-4369-9aa3-03c97b0b7c4c req-e30d611f-b3ec-4f85-b1d9-4ff2a6f5d4b3 service nova] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Received event network-changed-14494be3-972e-4dae-a55d-bd5b458491d9 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1257.126970] env[62522]: DEBUG nova.compute.manager [req-753c530f-5cd2-4369-9aa3-03c97b0b7c4c req-e30d611f-b3ec-4f85-b1d9-4ff2a6f5d4b3 service nova] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Refreshing instance network info cache due to event network-changed-14494be3-972e-4dae-a55d-bd5b458491d9. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1257.126970] env[62522]: DEBUG oslo_concurrency.lockutils [req-753c530f-5cd2-4369-9aa3-03c97b0b7c4c req-e30d611f-b3ec-4f85-b1d9-4ff2a6f5d4b3 service nova] Acquiring lock "refresh_cache-f74196c1-b00f-4f42-84dc-17b21fa30374" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1257.126970] env[62522]: DEBUG oslo_concurrency.lockutils [req-753c530f-5cd2-4369-9aa3-03c97b0b7c4c req-e30d611f-b3ec-4f85-b1d9-4ff2a6f5d4b3 service nova] Acquired lock "refresh_cache-f74196c1-b00f-4f42-84dc-17b21fa30374" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1257.127310] env[62522]: DEBUG nova.network.neutron [req-753c530f-5cd2-4369-9aa3-03c97b0b7c4c req-e30d611f-b3ec-4f85-b1d9-4ff2a6f5d4b3 service nova] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Refreshing network info cache for port 14494be3-972e-4dae-a55d-bd5b458491d9 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1257.258173] env[62522]: DEBUG nova.network.neutron [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Successfully updated port: a15f47bb-6d26-4faf-91e1-6ce27453f7bf {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1257.404708] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416421, 'name': CreateVM_Task, 'duration_secs': 0.287957} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.404959] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1257.405568] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1257.405739] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1257.406079] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1257.406329] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24bfd25e-3e23-42b6-ab78-da72ddd599b4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.411127] env[62522]: DEBUG oslo_vmware.api [None 
req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1257.411127] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520a2068-0bda-7d8b-b0fb-220f28c22247" [ 1257.411127] env[62522]: _type = "Task" [ 1257.411127] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.418608] env[62522]: DEBUG oslo_vmware.api [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520a2068-0bda-7d8b-b0fb-220f28c22247, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.425264] env[62522]: DEBUG oslo_concurrency.lockutils [req-416005e1-5a67-4ae4-aed2-1c25b462459e req-0c014812-e379-499e-b706-bf3acfba46f6 service nova] Releasing lock "refresh_cache-cb7a19f1-6093-47ee-bbbc-a75dd5423f32" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1257.505076] env[62522]: DEBUG oslo_vmware.api [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416422, 'name': ReconfigVM_Task, 'duration_secs': 0.313476} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.505438] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 1c6451e0-2fae-4d2b-86d7-86f9537a6259/1c6451e0-2fae-4d2b-86d7-86f9537a6259.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1257.506274] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f9d5150-a7c0-4fa7-97ba-a53c50f5ac48 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.526410] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef3d4160-1441-47f9-963c-01b6617b0818 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.548014] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab80989-44f6-491f-bf17-24d0c0aa50e8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.570157] env[62522]: DEBUG oslo_concurrency.lockutils [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.180s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1257.574593] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c9cadae5-aaf3-42fb-9b4c-41e70bae95a0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.587038] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1257.587038] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e7cce124-0887-4fd2-80b0-934c87c6061c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.593610] env[62522]: DEBUG oslo_vmware.api [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1257.593610] env[62522]: value = "task-2416423" [ 1257.593610] env[62522]: _type = "Task" [ 1257.593610] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.604196] env[62522]: DEBUG oslo_vmware.api [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416423, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.761127] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "refresh_cache-961df2ff-bd02-45af-afb8-14a99cfea1de" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1257.761651] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired lock "refresh_cache-961df2ff-bd02-45af-afb8-14a99cfea1de" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1257.761651] env[62522]: DEBUG nova.network.neutron [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1257.849853] env[62522]: DEBUG nova.network.neutron [req-753c530f-5cd2-4369-9aa3-03c97b0b7c4c req-e30d611f-b3ec-4f85-b1d9-4ff2a6f5d4b3 service nova] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Updated VIF entry in instance network info cache for port 14494be3-972e-4dae-a55d-bd5b458491d9. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1257.850248] env[62522]: DEBUG nova.network.neutron [req-753c530f-5cd2-4369-9aa3-03c97b0b7c4c req-e30d611f-b3ec-4f85-b1d9-4ff2a6f5d4b3 service nova] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Updating instance_info_cache with network_info: [{"id": "14494be3-972e-4dae-a55d-bd5b458491d9", "address": "fa:16:3e:de:c2:a3", "network": {"id": "2c9c537f-91b6-4217-8eaf-dc187f4ce7d5", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1154766161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fa792663b4ac41b7bf4c5e4b290f9b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14494be3-97", "ovs_interfaceid": "14494be3-972e-4dae-a55d-bd5b458491d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1257.921657] env[62522]: DEBUG oslo_vmware.api [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520a2068-0bda-7d8b-b0fb-220f28c22247, 'name': SearchDatastore_Task, 'duration_secs': 0.030501} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.922051] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1257.922280] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1257.922512] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1257.922657] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1257.922836] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1257.923118] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d12c72af-b53d-4c3d-915b-49a1227b007b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.931534] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1257.931717] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1257.932449] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ac5bc07-6b24-4bdd-873e-6149a273f205 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.937805] env[62522]: DEBUG oslo_vmware.api [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1257.937805] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5232720a-f7e6-9cca-7b77-9d079441a83d" [ 1257.937805] env[62522]: _type = "Task" [ 1257.937805] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.945249] env[62522]: DEBUG oslo_vmware.api [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5232720a-f7e6-9cca-7b77-9d079441a83d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.085058] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1258.087323] env[62522]: DEBUG oslo_concurrency.lockutils [None req-169b7255-544b-45d1-ac28-e36fe39f8913 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 21.403s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1258.088156] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.004s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1258.088345] env[62522]: INFO nova.compute.manager [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Unshelving [ 1258.104413] env[62522]: DEBUG oslo_vmware.api [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416423, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.294260] env[62522]: DEBUG nova.network.neutron [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1258.353503] env[62522]: DEBUG oslo_concurrency.lockutils [req-753c530f-5cd2-4369-9aa3-03c97b0b7c4c req-e30d611f-b3ec-4f85-b1d9-4ff2a6f5d4b3 service nova] Releasing lock "refresh_cache-f74196c1-b00f-4f42-84dc-17b21fa30374" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1258.415548] env[62522]: DEBUG nova.network.neutron [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Updating instance_info_cache with network_info: [{"id": "a15f47bb-6d26-4faf-91e1-6ce27453f7bf", "address": "fa:16:3e:46:c2:20", "network": {"id": "21d0ca22-5509-4f9b-b167-f9fde2dac808", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-547933771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a06421250694a98b13ff34ad816dc75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa15f47bb-6d", "ovs_interfaceid": "a15f47bb-6d26-4faf-91e1-6ce27453f7bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1258.447633] env[62522]: DEBUG oslo_vmware.api [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5232720a-f7e6-9cca-7b77-9d079441a83d, 'name': SearchDatastore_Task, 'duration_secs': 0.020194} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.448402] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-057e319a-8b54-43f8-9772-ebd4d3086d9a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.453248] env[62522]: DEBUG oslo_vmware.api [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1258.453248] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5268dd55-39a1-3632-8778-bb8e1b2abe9c" [ 1258.453248] env[62522]: _type = "Task" [ 1258.453248] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.460656] env[62522]: DEBUG oslo_vmware.api [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5268dd55-39a1-3632-8778-bb8e1b2abe9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.603883] env[62522]: DEBUG oslo_vmware.api [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416423, 'name': PowerOnVM_Task, 'duration_secs': 0.960133} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.604253] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1258.917880] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Releasing lock "refresh_cache-961df2ff-bd02-45af-afb8-14a99cfea1de" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1258.918242] env[62522]: DEBUG nova.compute.manager [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Instance network_info: |[{"id": "a15f47bb-6d26-4faf-91e1-6ce27453f7bf", "address": "fa:16:3e:46:c2:20", "network": {"id": "21d0ca22-5509-4f9b-b167-f9fde2dac808", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-547933771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a06421250694a98b13ff34ad816dc75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa15f47bb-6d", "ovs_interfaceid": "a15f47bb-6d26-4faf-91e1-6ce27453f7bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1258.918669] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:c2:20', 'network_ref': 
{'type': 'OpaqueNetwork', 'network-id': '24144f5a-050a-4f1e-8d8c-774dc16dc791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a15f47bb-6d26-4faf-91e1-6ce27453f7bf', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1258.926576] env[62522]: DEBUG oslo.service.loopingcall [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1258.926789] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1258.927017] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9ad2a539-da35-4326-a323-126b9fea18dd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.946706] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1258.946706] env[62522]: value = "task-2416424" [ 1258.946706] env[62522]: _type = "Task" [ 1258.946706] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.955514] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416424, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.962633] env[62522]: DEBUG oslo_vmware.api [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5268dd55-39a1-3632-8778-bb8e1b2abe9c, 'name': SearchDatastore_Task, 'duration_secs': 0.009645} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.962862] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1258.963133] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] f74196c1-b00f-4f42-84dc-17b21fa30374/f74196c1-b00f-4f42-84dc-17b21fa30374.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1258.963418] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ce6af04f-c0f8-47ce-9bcc-349ab65ace44 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.970858] env[62522]: DEBUG oslo_vmware.api [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1258.970858] env[62522]: value = "task-2416425" [ 1258.970858] env[62522]: _type = "Task" [ 1258.970858] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.980465] env[62522]: DEBUG oslo_vmware.api [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416425, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.116790] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1259.117156] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1259.117409] env[62522]: DEBUG nova.objects.instance [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lazy-loading 'pci_requests' on Instance uuid cb7a19f1-6093-47ee-bbbc-a75dd5423f32 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1259.154112] env[62522]: DEBUG nova.compute.manager [req-fb6592a1-771c-4000-aeda-a9624b69824c req-0f060f28-561e-4edb-958a-ba3ec5c14efe service nova] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Received event network-vif-plugged-a15f47bb-6d26-4faf-91e1-6ce27453f7bf {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1259.154324] env[62522]: DEBUG oslo_concurrency.lockutils [req-fb6592a1-771c-4000-aeda-a9624b69824c req-0f060f28-561e-4edb-958a-ba3ec5c14efe service nova] Acquiring lock "961df2ff-bd02-45af-afb8-14a99cfea1de-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1259.154574] env[62522]: DEBUG oslo_concurrency.lockutils [req-fb6592a1-771c-4000-aeda-a9624b69824c req-0f060f28-561e-4edb-958a-ba3ec5c14efe service nova] Lock "961df2ff-bd02-45af-afb8-14a99cfea1de-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1259.154810] env[62522]: DEBUG oslo_concurrency.lockutils [req-fb6592a1-771c-4000-aeda-a9624b69824c req-0f060f28-561e-4edb-958a-ba3ec5c14efe service nova] Lock "961df2ff-bd02-45af-afb8-14a99cfea1de-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1259.155057] env[62522]: DEBUG nova.compute.manager [req-fb6592a1-771c-4000-aeda-a9624b69824c req-0f060f28-561e-4edb-958a-ba3ec5c14efe service nova] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] No waiting events found dispatching network-vif-plugged-a15f47bb-6d26-4faf-91e1-6ce27453f7bf {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1259.155274] env[62522]: WARNING nova.compute.manager [req-fb6592a1-771c-4000-aeda-a9624b69824c req-0f060f28-561e-4edb-958a-ba3ec5c14efe service nova] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Received unexpected event 
network-vif-plugged-a15f47bb-6d26-4faf-91e1-6ce27453f7bf for instance with vm_state building and task_state spawning. [ 1259.155490] env[62522]: DEBUG nova.compute.manager [req-fb6592a1-771c-4000-aeda-a9624b69824c req-0f060f28-561e-4edb-958a-ba3ec5c14efe service nova] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Received event network-changed-a15f47bb-6d26-4faf-91e1-6ce27453f7bf {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1259.155678] env[62522]: DEBUG nova.compute.manager [req-fb6592a1-771c-4000-aeda-a9624b69824c req-0f060f28-561e-4edb-958a-ba3ec5c14efe service nova] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Refreshing instance network info cache due to event network-changed-a15f47bb-6d26-4faf-91e1-6ce27453f7bf. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1259.155942] env[62522]: DEBUG oslo_concurrency.lockutils [req-fb6592a1-771c-4000-aeda-a9624b69824c req-0f060f28-561e-4edb-958a-ba3ec5c14efe service nova] Acquiring lock "refresh_cache-961df2ff-bd02-45af-afb8-14a99cfea1de" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1259.156205] env[62522]: DEBUG oslo_concurrency.lockutils [req-fb6592a1-771c-4000-aeda-a9624b69824c req-0f060f28-561e-4edb-958a-ba3ec5c14efe service nova] Acquired lock "refresh_cache-961df2ff-bd02-45af-afb8-14a99cfea1de" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1259.156384] env[62522]: DEBUG nova.network.neutron [req-fb6592a1-771c-4000-aeda-a9624b69824c req-0f060f28-561e-4edb-958a-ba3ec5c14efe service nova] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Refreshing network info cache for port a15f47bb-6d26-4faf-91e1-6ce27453f7bf {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1259.456065] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416424, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.479607] env[62522]: DEBUG oslo_vmware.api [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416425, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.443171} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.479865] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] f74196c1-b00f-4f42-84dc-17b21fa30374/f74196c1-b00f-4f42-84dc-17b21fa30374.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1259.480093] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1259.480351] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1dbeb6f3-9577-4998-8db5-bd4091b3b140 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.487839] env[62522]: DEBUG oslo_vmware.api [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1259.487839] env[62522]: value = "task-2416426" [ 1259.487839] env[62522]: _type = "Task" [ 1259.487839] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.496951] env[62522]: DEBUG oslo_vmware.api [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416426, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.621435] env[62522]: DEBUG nova.objects.instance [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lazy-loading 'numa_topology' on Instance uuid cb7a19f1-6093-47ee-bbbc-a75dd5423f32 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1259.649370] env[62522]: INFO nova.compute.manager [None req-c6dc7d3d-9a67-4c9f-bea0-ef86068c0438 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Updating instance to original state: 'active' [ 1259.850066] env[62522]: DEBUG nova.network.neutron [req-fb6592a1-771c-4000-aeda-a9624b69824c req-0f060f28-561e-4edb-958a-ba3ec5c14efe service nova] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Updated VIF entry in instance network info cache for port a15f47bb-6d26-4faf-91e1-6ce27453f7bf. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1259.850461] env[62522]: DEBUG nova.network.neutron [req-fb6592a1-771c-4000-aeda-a9624b69824c req-0f060f28-561e-4edb-958a-ba3ec5c14efe service nova] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Updating instance_info_cache with network_info: [{"id": "a15f47bb-6d26-4faf-91e1-6ce27453f7bf", "address": "fa:16:3e:46:c2:20", "network": {"id": "21d0ca22-5509-4f9b-b167-f9fde2dac808", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-547933771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a06421250694a98b13ff34ad816dc75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa15f47bb-6d", "ovs_interfaceid": "a15f47bb-6d26-4faf-91e1-6ce27453f7bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1259.957213] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416424, 'name': CreateVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.997394] env[62522]: DEBUG oslo_vmware.api [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416426, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070818} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.997616] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1259.998569] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d74f5d80-3f46-4517-b59a-8624cf2d9aac {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.020497] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] f74196c1-b00f-4f42-84dc-17b21fa30374/f74196c1-b00f-4f42-84dc-17b21fa30374.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1260.020766] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b88afbc5-8054-4c7f-9f2e-2b64f11b9544 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.042862] env[62522]: DEBUG oslo_vmware.api [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1260.042862] env[62522]: value = "task-2416427" [ 1260.042862] env[62522]: _type = "Task" [ 1260.042862] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.050255] env[62522]: DEBUG oslo_vmware.api [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416427, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.123557] env[62522]: INFO nova.compute.claims [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1260.353334] env[62522]: DEBUG oslo_concurrency.lockutils [req-fb6592a1-771c-4000-aeda-a9624b69824c req-0f060f28-561e-4edb-958a-ba3ec5c14efe service nova] Releasing lock "refresh_cache-961df2ff-bd02-45af-afb8-14a99cfea1de" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1260.457586] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416424, 'name': CreateVM_Task, 'duration_secs': 1.40905} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.457905] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1260.458435] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1260.458607] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1260.458921] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1260.459229] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad8b836f-df8b-4f9d-8684-458f9010bfb3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.463456] env[62522]: DEBUG oslo_vmware.api [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1260.463456] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]528e3896-81cf-c672-504b-2fa94a4921ec" [ 1260.463456] env[62522]: _type = "Task" [ 1260.463456] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.471021] env[62522]: DEBUG oslo_vmware.api [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]528e3896-81cf-c672-504b-2fa94a4921ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.552664] env[62522]: DEBUG oslo_vmware.api [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416427, 'name': ReconfigVM_Task, 'duration_secs': 0.274} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.552961] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Reconfigured VM instance instance-0000006f to attach disk [datastore2] f74196c1-b00f-4f42-84dc-17b21fa30374/f74196c1-b00f-4f42-84dc-17b21fa30374.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1260.553606] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5e9da508-a24f-44a2-a6e6-65b2b1d5f7ed {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.560511] env[62522]: DEBUG oslo_vmware.api [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1260.560511] env[62522]: value = "task-2416428" [ 1260.560511] env[62522]: _type = "Task" [ 1260.560511] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.568669] env[62522]: DEBUG oslo_vmware.api [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416428, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.648541] env[62522]: DEBUG oslo_concurrency.lockutils [None req-edb49903-4a3a-4c1c-9c1e-c7a5d6bfeb1c tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "7406a1a4-a342-475b-ad02-6a29f7c487ee" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1260.648848] env[62522]: DEBUG oslo_concurrency.lockutils [None req-edb49903-4a3a-4c1c-9c1e-c7a5d6bfeb1c tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "7406a1a4-a342-475b-ad02-6a29f7c487ee" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1260.874452] env[62522]: DEBUG oslo_concurrency.lockutils [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "1c6451e0-2fae-4d2b-86d7-86f9537a6259" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1260.874762] env[62522]: DEBUG oslo_concurrency.lockutils [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "1c6451e0-2fae-4d2b-86d7-86f9537a6259" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1260.874988] env[62522]: DEBUG oslo_concurrency.lockutils [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "1c6451e0-2fae-4d2b-86d7-86f9537a6259-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1260.875245] env[62522]: DEBUG oslo_concurrency.lockutils [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "1c6451e0-2fae-4d2b-86d7-86f9537a6259-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1260.875433] env[62522]: DEBUG oslo_concurrency.lockutils [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "1c6451e0-2fae-4d2b-86d7-86f9537a6259-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1260.877589] env[62522]: INFO nova.compute.manager [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Terminating instance [ 1260.974598] env[62522]: DEBUG oslo_vmware.api [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]528e3896-81cf-c672-504b-2fa94a4921ec, 'name': SearchDatastore_Task, 'duration_secs': 0.009411} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.974875] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1260.975231] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1260.975519] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1260.975677] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1260.975864] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1260.976143] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f742358-9136-4cf5-b9b4-5e8712df2bc2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.984634] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1260.984807] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1260.985493] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a91dfc7-0fa3-41f7-a774-dd8b144eabe4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.992206] env[62522]: DEBUG oslo_vmware.api [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1260.992206] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b70145-cd32-805c-f1a9-349a63f24b3f" [ 1260.992206] env[62522]: _type = "Task" [ 1260.992206] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.999969] env[62522]: DEBUG oslo_vmware.api [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b70145-cd32-805c-f1a9-349a63f24b3f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.069928] env[62522]: DEBUG oslo_vmware.api [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416428, 'name': Rename_Task, 'duration_secs': 0.144696} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.070261] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1261.070519] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d76a6a2-a5af-4824-8698-25d7cf2cb216 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.077937] env[62522]: DEBUG oslo_vmware.api [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1261.077937] env[62522]: value = "task-2416429" [ 1261.077937] env[62522]: _type = "Task" [ 1261.077937] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.088170] env[62522]: DEBUG oslo_vmware.api [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416429, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.151781] env[62522]: DEBUG nova.compute.utils [None req-edb49903-4a3a-4c1c-9c1e-c7a5d6bfeb1c tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1261.241706] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f3ff31-62a9-4b5e-84a4-a5830a7a766d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.249907] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-995f0dbb-604e-4b03-afa7-fdedac2ac09a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.280806] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-541348ba-87cd-486f-a9ae-fe79bde6b18c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.288387] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c637477-0397-4137-aedc-343dcab609cc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.301782] env[62522]: DEBUG nova.compute.provider_tree [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1261.381802] env[62522]: DEBUG nova.compute.manager [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1261.382141] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1261.382462] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b14104cd-288e-4164-814a-057d0c0de881 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.391542] env[62522]: DEBUG oslo_vmware.api [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1261.391542] env[62522]: value = "task-2416430" [ 1261.391542] env[62522]: _type = "Task" [ 1261.391542] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.400117] env[62522]: DEBUG oslo_vmware.api [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416430, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.508288] env[62522]: DEBUG oslo_vmware.api [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b70145-cd32-805c-f1a9-349a63f24b3f, 'name': SearchDatastore_Task, 'duration_secs': 0.008773} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.509387] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c134e72-6a2f-4c1d-818f-1df757a4f170 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.515765] env[62522]: DEBUG oslo_vmware.api [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1261.515765] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5282588c-dbab-b7aa-7676-49ef3d854bea" [ 1261.515765] env[62522]: _type = "Task" [ 1261.515765] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.523755] env[62522]: DEBUG oslo_vmware.api [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5282588c-dbab-b7aa-7676-49ef3d854bea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.587104] env[62522]: DEBUG oslo_vmware.api [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416429, 'name': PowerOnVM_Task, 'duration_secs': 0.449745} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.587371] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1261.587571] env[62522]: INFO nova.compute.manager [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Took 7.06 seconds to spawn the instance on the hypervisor. 
[ 1261.587746] env[62522]: DEBUG nova.compute.manager [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1261.588521] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64890549-1dde-487a-ad4f-1f8195297724 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.654460] env[62522]: DEBUG oslo_concurrency.lockutils [None req-edb49903-4a3a-4c1c-9c1e-c7a5d6bfeb1c tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "7406a1a4-a342-475b-ad02-6a29f7c487ee" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1261.805320] env[62522]: DEBUG nova.scheduler.client.report [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1261.900949] env[62522]: DEBUG oslo_vmware.api [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416430, 'name': PowerOffVM_Task, 'duration_secs': 0.18899} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.901307] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1261.901554] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Volume detach. 
Driver type: vmdk {{(pid=62522) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1261.901752] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489859', 'volume_id': '790e2a55-e79e-4d14-9cf8-bed0cf3d0293', 'name': 'volume-790e2a55-e79e-4d14-9cf8-bed0cf3d0293', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '1c6451e0-2fae-4d2b-86d7-86f9537a6259', 'attached_at': '2025-02-10T12:29:25.000000', 'detached_at': '', 'volume_id': '790e2a55-e79e-4d14-9cf8-bed0cf3d0293', 'serial': '790e2a55-e79e-4d14-9cf8-bed0cf3d0293'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1261.902511] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3505bd85-16d2-4667-9776-955e2acee715 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.922778] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfed1121-c122-4668-afb4-83a6b3903f42 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.929116] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba8d86ce-8619-4dec-88db-a98b991156aa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.950285] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f19f7fed-dbff-46e2-bada-16ecf01aa384 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.965352] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] The volume has not been displaced from its original location: [datastore1] volume-790e2a55-e79e-4d14-9cf8-bed0cf3d0293/volume-790e2a55-e79e-4d14-9cf8-bed0cf3d0293.vmdk. No consolidation needed. 
{{(pid=62522) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1261.970614] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Reconfiguring VM instance instance-00000066 to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1261.970912] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ffe2e04-acef-4326-b333-e1c5aa2521fa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.988388] env[62522]: DEBUG oslo_vmware.api [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1261.988388] env[62522]: value = "task-2416431" [ 1261.988388] env[62522]: _type = "Task" [ 1261.988388] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.999974] env[62522]: DEBUG oslo_vmware.api [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416431, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.025382] env[62522]: DEBUG oslo_vmware.api [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5282588c-dbab-b7aa-7676-49ef3d854bea, 'name': SearchDatastore_Task, 'duration_secs': 0.010288} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.025629] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1262.025901] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 961df2ff-bd02-45af-afb8-14a99cfea1de/961df2ff-bd02-45af-afb8-14a99cfea1de.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1262.026185] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8044395b-55f2-4d44-b727-95484e7cad73 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.032793] env[62522]: DEBUG oslo_vmware.api [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1262.032793] env[62522]: value = "task-2416432" [ 1262.032793] env[62522]: _type = "Task" [ 1262.032793] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.039947] env[62522]: DEBUG oslo_vmware.api [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416432, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.105075] env[62522]: INFO nova.compute.manager [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Took 12.71 seconds to build instance. 
[ 1262.311177] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.194s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1262.346148] env[62522]: INFO nova.network.neutron [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Updating port 33665d0f-b7dd-4d62-86d5-8ccb8f178e97 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1262.500598] env[62522]: DEBUG oslo_vmware.api [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416431, 'name': ReconfigVM_Task, 'duration_secs': 0.225844} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.500942] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Reconfigured VM instance instance-00000066 to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1262.505752] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3cf85760-7cc8-49d2-8d1f-1f8db917c89c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.522628] env[62522]: DEBUG oslo_vmware.api [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1262.522628] env[62522]: value = "task-2416433" [ 1262.522628] env[62522]: _type = "Task" [ 1262.522628] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.531020] env[62522]: DEBUG oslo_vmware.api [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416433, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.542251] env[62522]: DEBUG oslo_vmware.api [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416432, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.481359} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.542538] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 961df2ff-bd02-45af-afb8-14a99cfea1de/961df2ff-bd02-45af-afb8-14a99cfea1de.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1262.542758] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1262.543033] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8e228c20-6248-4ed2-ae9e-4e1a44d94ec2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.550173] env[62522]: DEBUG oslo_vmware.api [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1262.550173] env[62522]: value = "task-2416434" [ 1262.550173] env[62522]: _type = "Task" [ 1262.550173] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.560391] env[62522]: DEBUG oslo_vmware.api [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416434, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.607357] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0028b0d7-f256-4eed-bce0-95d0a35be8f3 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "f74196c1-b00f-4f42-84dc-17b21fa30374" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.225s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1262.734910] env[62522]: DEBUG oslo_concurrency.lockutils [None req-edb49903-4a3a-4c1c-9c1e-c7a5d6bfeb1c tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "7406a1a4-a342-475b-ad02-6a29f7c487ee" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1262.735275] env[62522]: DEBUG oslo_concurrency.lockutils [None req-edb49903-4a3a-4c1c-9c1e-c7a5d6bfeb1c tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "7406a1a4-a342-475b-ad02-6a29f7c487ee" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1262.735523] env[62522]: INFO nova.compute.manager [None req-edb49903-4a3a-4c1c-9c1e-c7a5d6bfeb1c tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Attaching volume da41f036-456a-409e-a359-6157800d323c to /dev/sdb [ 1262.779011] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef5334d2-20fb-408b-bbdd-7b72fed4bc8d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.787339] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d025c67-81dc-4fb0-ba3c-6e3efe61dbf0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.800732] env[62522]: DEBUG nova.virt.block_device [None req-edb49903-4a3a-4c1c-9c1e-c7a5d6bfeb1c tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Updating existing volume attachment record: b21095a7-599d-4c4a-97ea-c31ca164cf55 {{(pid=62522) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1262.912335] env[62522]: DEBUG nova.compute.manager [req-c0680ba7-d5e7-40b8-8f57-0a46a7d6768d req-6ed996ed-7179-4ba1-b889-95234a29daaa service nova] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Received event network-changed-14494be3-972e-4dae-a55d-bd5b458491d9 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1262.912624] env[62522]: DEBUG nova.compute.manager [req-c0680ba7-d5e7-40b8-8f57-0a46a7d6768d req-6ed996ed-7179-4ba1-b889-95234a29daaa service nova] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Refreshing instance network info cache due to event network-changed-14494be3-972e-4dae-a55d-bd5b458491d9. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1262.912840] env[62522]: DEBUG oslo_concurrency.lockutils [req-c0680ba7-d5e7-40b8-8f57-0a46a7d6768d req-6ed996ed-7179-4ba1-b889-95234a29daaa service nova] Acquiring lock "refresh_cache-f74196c1-b00f-4f42-84dc-17b21fa30374" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1262.912986] env[62522]: DEBUG oslo_concurrency.lockutils [req-c0680ba7-d5e7-40b8-8f57-0a46a7d6768d req-6ed996ed-7179-4ba1-b889-95234a29daaa service nova] Acquired lock "refresh_cache-f74196c1-b00f-4f42-84dc-17b21fa30374" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1262.913164] env[62522]: DEBUG nova.network.neutron [req-c0680ba7-d5e7-40b8-8f57-0a46a7d6768d req-6ed996ed-7179-4ba1-b889-95234a29daaa service nova] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Refreshing network info cache for port 14494be3-972e-4dae-a55d-bd5b458491d9 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1263.033763] env[62522]: DEBUG oslo_vmware.api [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416433, 'name': ReconfigVM_Task, 'duration_secs': 0.143651} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.034528] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489859', 'volume_id': '790e2a55-e79e-4d14-9cf8-bed0cf3d0293', 'name': 'volume-790e2a55-e79e-4d14-9cf8-bed0cf3d0293', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '1c6451e0-2fae-4d2b-86d7-86f9537a6259', 'attached_at': '2025-02-10T12:29:25.000000', 'detached_at': '', 'volume_id': '790e2a55-e79e-4d14-9cf8-bed0cf3d0293', 'serial': '790e2a55-e79e-4d14-9cf8-bed0cf3d0293'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1263.034868] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1263.035644] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d9ec765-e2cb-4a95-876e-365cacedcf82 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.042132] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1263.042361] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dad5f815-d8cd-4f05-86f7-c321553225fb 
{{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.059294] env[62522]: DEBUG oslo_vmware.api [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416434, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068073} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.059528] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1263.060289] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4c8650-32f9-4893-9bfb-17de6838471f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.082468] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] 961df2ff-bd02-45af-afb8-14a99cfea1de/961df2ff-bd02-45af-afb8-14a99cfea1de.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1263.082696] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-036ac90a-f435-4513-bfee-715eccd90f84 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.102103] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1263.102337] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1263.102516] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Deleting the datastore file [datastore1] 1c6451e0-2fae-4d2b-86d7-86f9537a6259 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1263.104079] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fa66e9a0-24c6-435c-9840-b5db8efe971d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.105886] env[62522]: DEBUG oslo_vmware.api [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 
1263.105886] env[62522]: value = "task-2416439" [ 1263.105886] env[62522]: _type = "Task" [ 1263.105886] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.110307] env[62522]: DEBUG oslo_vmware.api [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1263.110307] env[62522]: value = "task-2416440" [ 1263.110307] env[62522]: _type = "Task" [ 1263.110307] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.116336] env[62522]: DEBUG oslo_vmware.api [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416439, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.120780] env[62522]: DEBUG oslo_vmware.api [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416440, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.619321] env[62522]: DEBUG oslo_vmware.api [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416439, 'name': ReconfigVM_Task, 'duration_secs': 0.339611} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.622280] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Reconfigured VM instance instance-00000070 to attach disk [datastore2] 961df2ff-bd02-45af-afb8-14a99cfea1de/961df2ff-bd02-45af-afb8-14a99cfea1de.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1263.622962] env[62522]: DEBUG oslo_vmware.api [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416440, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14451} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.623220] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-88683869-9ff6-4b85-abae-4f1b41c4cd4f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.624912] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1263.625119] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1263.625314] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1263.625481] env[62522]: INFO nova.compute.manager [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Took 2.24 seconds to destroy the instance on the hypervisor. [ 1263.625719] env[62522]: DEBUG oslo.service.loopingcall [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1263.625915] env[62522]: DEBUG nova.compute.manager [-] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1263.626016] env[62522]: DEBUG nova.network.neutron [-] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1263.632526] env[62522]: DEBUG oslo_vmware.api [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1263.632526] env[62522]: value = "task-2416441" [ 1263.632526] env[62522]: _type = "Task" [ 1263.632526] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.640270] env[62522]: DEBUG oslo_vmware.api [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416441, 'name': Rename_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.644112] env[62522]: DEBUG nova.network.neutron [req-c0680ba7-d5e7-40b8-8f57-0a46a7d6768d req-6ed996ed-7179-4ba1-b889-95234a29daaa service nova] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Updated VIF entry in instance network info cache for port 14494be3-972e-4dae-a55d-bd5b458491d9. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1263.644474] env[62522]: DEBUG nova.network.neutron [req-c0680ba7-d5e7-40b8-8f57-0a46a7d6768d req-6ed996ed-7179-4ba1-b889-95234a29daaa service nova] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Updating instance_info_cache with network_info: [{"id": "14494be3-972e-4dae-a55d-bd5b458491d9", "address": "fa:16:3e:de:c2:a3", "network": {"id": "2c9c537f-91b6-4217-8eaf-dc187f4ce7d5", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1154766161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fa792663b4ac41b7bf4c5e4b290f9b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14494be3-97", "ovs_interfaceid": "14494be3-972e-4dae-a55d-bd5b458491d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1264.121151] env[62522]: DEBUG nova.compute.manager [req-cd2e60c0-7580-4b06-acbd-b6b496f9949a req-4ce9cb2b-1274-4ef2-a164-aacc46a348e7 service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Received event network-vif-plugged-33665d0f-b7dd-4d62-86d5-8ccb8f178e97 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1264.121424] env[62522]: DEBUG oslo_concurrency.lockutils [req-cd2e60c0-7580-4b06-acbd-b6b496f9949a req-4ce9cb2b-1274-4ef2-a164-aacc46a348e7 service nova] Acquiring lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1264.121668] env[62522]: DEBUG oslo_concurrency.lockutils [req-cd2e60c0-7580-4b06-acbd-b6b496f9949a req-4ce9cb2b-1274-4ef2-a164-aacc46a348e7 service nova] Lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1264.121859] env[62522]: DEBUG oslo_concurrency.lockutils [req-cd2e60c0-7580-4b06-acbd-b6b496f9949a req-4ce9cb2b-1274-4ef2-a164-aacc46a348e7 service nova] Lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 
0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1264.122691] env[62522]: DEBUG nova.compute.manager [req-cd2e60c0-7580-4b06-acbd-b6b496f9949a req-4ce9cb2b-1274-4ef2-a164-aacc46a348e7 service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] No waiting events found dispatching network-vif-plugged-33665d0f-b7dd-4d62-86d5-8ccb8f178e97 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1264.122965] env[62522]: WARNING nova.compute.manager [req-cd2e60c0-7580-4b06-acbd-b6b496f9949a req-4ce9cb2b-1274-4ef2-a164-aacc46a348e7 service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Received unexpected event network-vif-plugged-33665d0f-b7dd-4d62-86d5-8ccb8f178e97 for instance with vm_state shelved_offloaded and task_state spawning. [ 1264.142734] env[62522]: DEBUG oslo_vmware.api [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416441, 'name': Rename_Task, 'duration_secs': 0.462531} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.143034] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1264.143313] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-048758c7-a52b-4165-af6e-ba8fa8860a32 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.146704] env[62522]: DEBUG oslo_concurrency.lockutils [req-c0680ba7-d5e7-40b8-8f57-0a46a7d6768d req-6ed996ed-7179-4ba1-b889-95234a29daaa service nova] Releasing lock "refresh_cache-f74196c1-b00f-4f42-84dc-17b21fa30374" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1264.151039] env[62522]: DEBUG oslo_vmware.api [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1264.151039] env[62522]: value = "task-2416442" [ 1264.151039] env[62522]: _type = "Task" [ 1264.151039] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.159958] env[62522]: DEBUG oslo_vmware.api [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416442, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.316229] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "refresh_cache-cb7a19f1-6093-47ee-bbbc-a75dd5423f32" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1264.316702] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquired lock "refresh_cache-cb7a19f1-6093-47ee-bbbc-a75dd5423f32" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1264.316950] env[62522]: DEBUG nova.network.neutron [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1264.661209] env[62522]: DEBUG oslo_vmware.api [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416442, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.788930] env[62522]: DEBUG nova.network.neutron [-] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1265.108046] env[62522]: DEBUG nova.network.neutron [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Updating instance_info_cache with network_info: [{"id": "33665d0f-b7dd-4d62-86d5-8ccb8f178e97", "address": "fa:16:3e:1d:d3:51", "network": {"id": "949f3536-8a7e-4edf-b6cc-6a264fe5fe83", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1891232839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f93394feaa4f4b61a5d3d670d32ec599", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33665d0f-b7", "ovs_interfaceid": "33665d0f-b7dd-4d62-86d5-8ccb8f178e97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1265.160541] env[62522]: 
DEBUG oslo_vmware.api [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416442, 'name': PowerOnVM_Task, 'duration_secs': 0.513868} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.160789] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1265.160986] env[62522]: INFO nova.compute.manager [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Took 8.42 seconds to spawn the instance on the hypervisor. [ 1265.161187] env[62522]: DEBUG nova.compute.manager [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1265.161922] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa8924c-ae90-4043-8f5f-966bb71ef857 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.291510] env[62522]: INFO nova.compute.manager [-] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Took 1.67 seconds to deallocate network for instance. 
[ 1265.610854] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Releasing lock "refresh_cache-cb7a19f1-6093-47ee-bbbc-a75dd5423f32" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1265.639860] env[62522]: DEBUG nova.virt.hardware [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='b4009d1c2929bbeb52eee4defacf70a0',container_format='bare',created_at=2025-02-10T12:29:07Z,direct_url=,disk_format='vmdk',id=72a425d1-613f-40dd-aa4b-ada95eb89bb3,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1711633982-shelved',owner='f93394feaa4f4b61a5d3d670d32ec599',properties=ImageMetaProps,protected=,size=31663104,status='active',tags=,updated_at=2025-02-10T12:29:22Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1265.640223] env[62522]: DEBUG nova.virt.hardware [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1265.640452] env[62522]: DEBUG nova.virt.hardware [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1265.640703] env[62522]: DEBUG nova.virt.hardware [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1265.640910] env[62522]: DEBUG nova.virt.hardware [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1265.641151] env[62522]: DEBUG nova.virt.hardware [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1265.641486] env[62522]: DEBUG nova.virt.hardware [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
1265.641724] env[62522]: DEBUG nova.virt.hardware [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1265.641957] env[62522]: DEBUG nova.virt.hardware [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1265.642219] env[62522]: DEBUG nova.virt.hardware [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1265.642439] env[62522]: DEBUG nova.virt.hardware [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1265.643326] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-631145bc-b865-4b67-b09e-8a0e5c250653 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.651165] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c86558-3e99-4830-a6db-be720f244fe7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.663934] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:d3:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '983826cf-6390-4ec6-bf97-30a1060947fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '33665d0f-b7dd-4d62-86d5-8ccb8f178e97', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1265.671203] env[62522]: DEBUG oslo.service.loopingcall [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1265.675029] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1265.676032] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-96de9e96-e695-45ed-9956-7e19862ff75c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.691088] env[62522]: INFO nova.compute.manager [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Took 15.89 seconds to build instance. [ 1265.695083] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1265.695083] env[62522]: value = "task-2416444" [ 1265.695083] env[62522]: _type = "Task" [ 1265.695083] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.701972] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416444, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.836719] env[62522]: INFO nova.compute.manager [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Took 0.54 seconds to detach 1 volumes for instance. [ 1266.166382] env[62522]: DEBUG nova.compute.manager [req-422c4735-7ff5-4a75-8321-2e24782af563 req-8975ab64-12c7-49f5-9132-6574dddcceeb service nova] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Received event network-vif-deleted-66b8c64e-5981-4cc9-b51a-df5bce03233c {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1266.166598] env[62522]: DEBUG nova.compute.manager [req-422c4735-7ff5-4a75-8321-2e24782af563 req-8975ab64-12c7-49f5-9132-6574dddcceeb service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Received event network-changed-33665d0f-b7dd-4d62-86d5-8ccb8f178e97 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1266.166764] env[62522]: DEBUG nova.compute.manager [req-422c4735-7ff5-4a75-8321-2e24782af563 req-8975ab64-12c7-49f5-9132-6574dddcceeb service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Refreshing instance network info cache due to event network-changed-33665d0f-b7dd-4d62-86d5-8ccb8f178e97. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1266.166978] env[62522]: DEBUG oslo_concurrency.lockutils [req-422c4735-7ff5-4a75-8321-2e24782af563 req-8975ab64-12c7-49f5-9132-6574dddcceeb service nova] Acquiring lock "refresh_cache-cb7a19f1-6093-47ee-bbbc-a75dd5423f32" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1266.167136] env[62522]: DEBUG oslo_concurrency.lockutils [req-422c4735-7ff5-4a75-8321-2e24782af563 req-8975ab64-12c7-49f5-9132-6574dddcceeb service nova] Acquired lock "refresh_cache-cb7a19f1-6093-47ee-bbbc-a75dd5423f32" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1266.167302] env[62522]: DEBUG nova.network.neutron [req-422c4735-7ff5-4a75-8321-2e24782af563 req-8975ab64-12c7-49f5-9132-6574dddcceeb service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Refreshing network info cache for port 33665d0f-b7dd-4d62-86d5-8ccb8f178e97 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1266.192822] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bcb01176-22df-495b-bf66-759e020d839e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "961df2ff-bd02-45af-afb8-14a99cfea1de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.403s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1266.205511] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416444, 'name': CreateVM_Task, 'duration_secs': 0.344814} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.205678] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1266.206349] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/72a425d1-613f-40dd-aa4b-ada95eb89bb3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1266.206523] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquired lock "[datastore1] devstack-image-cache_base/72a425d1-613f-40dd-aa4b-ada95eb89bb3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1266.206893] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/72a425d1-613f-40dd-aa4b-ada95eb89bb3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1266.207159] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4761e96a-81c7-4c4a-8f9b-4375e06d0e42 {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.211928] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1266.211928] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5231e17a-0479-fd1c-8945-206be16a3614" [ 1266.211928] env[62522]: _type = "Task" [ 1266.211928] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.219294] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5231e17a-0479-fd1c-8945-206be16a3614, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.343312] env[62522]: DEBUG oslo_concurrency.lockutils [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1266.343640] env[62522]: DEBUG oslo_concurrency.lockutils [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1266.343873] env[62522]: DEBUG oslo_concurrency.lockutils [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1266.371093] env[62522]: INFO nova.scheduler.client.report [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Deleted allocations for instance 1c6451e0-2fae-4d2b-86d7-86f9537a6259 [ 1266.722688] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Releasing lock "[datastore1] devstack-image-cache_base/72a425d1-613f-40dd-aa4b-ada95eb89bb3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1266.723084] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Processing image 72a425d1-613f-40dd-aa4b-ada95eb89bb3 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1266.723168] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd 
tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/72a425d1-613f-40dd-aa4b-ada95eb89bb3/72a425d1-613f-40dd-aa4b-ada95eb89bb3.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1266.723317] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquired lock "[datastore1] devstack-image-cache_base/72a425d1-613f-40dd-aa4b-ada95eb89bb3/72a425d1-613f-40dd-aa4b-ada95eb89bb3.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1266.723495] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1266.723733] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f8cb3302-5471-4c0c-a639-05823ed53734 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.731459] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1266.731647] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1266.732331] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4c555f6-a99a-4145-8597-1f589f33dce0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.739099] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1266.739099] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52cd9684-9c11-e668-6050-5f68bba00289" [ 1266.739099] env[62522]: _type = "Task" [ 1266.739099] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.746325] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52cd9684-9c11-e668-6050-5f68bba00289, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.875499] env[62522]: DEBUG nova.network.neutron [req-422c4735-7ff5-4a75-8321-2e24782af563 req-8975ab64-12c7-49f5-9132-6574dddcceeb service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Updated VIF entry in instance network info cache for port 33665d0f-b7dd-4d62-86d5-8ccb8f178e97. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1266.875875] env[62522]: DEBUG nova.network.neutron [req-422c4735-7ff5-4a75-8321-2e24782af563 req-8975ab64-12c7-49f5-9132-6574dddcceeb service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Updating instance_info_cache with network_info: [{"id": "33665d0f-b7dd-4d62-86d5-8ccb8f178e97", "address": "fa:16:3e:1d:d3:51", "network": {"id": "949f3536-8a7e-4edf-b6cc-6a264fe5fe83", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1891232839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f93394feaa4f4b61a5d3d670d32ec599", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33665d0f-b7", "ovs_interfaceid": "33665d0f-b7dd-4d62-86d5-8ccb8f178e97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1266.879196] env[62522]: DEBUG oslo_concurrency.lockutils [None req-afdca260-70e5-485d-85f9-282ec1a4722b tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "1c6451e0-2fae-4d2b-86d7-86f9537a6259" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.004s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1267.249986] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Preparing fetch location {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1267.250311] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Fetch image to [datastore1] OSTACK_IMG_6c8ae5b9-7475-427b-9a1d-bc308a8ea840/OSTACK_IMG_6c8ae5b9-7475-427b-9a1d-bc308a8ea840.vmdk {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1267.250521] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd 
tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Downloading stream optimized image 72a425d1-613f-40dd-aa4b-ada95eb89bb3 to [datastore1] OSTACK_IMG_6c8ae5b9-7475-427b-9a1d-bc308a8ea840/OSTACK_IMG_6c8ae5b9-7475-427b-9a1d-bc308a8ea840.vmdk on the data store datastore1 as vApp {{(pid=62522) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1267.250729] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Downloading image file data 72a425d1-613f-40dd-aa4b-ada95eb89bb3 to the ESX as VM named 'OSTACK_IMG_6c8ae5b9-7475-427b-9a1d-bc308a8ea840' {{(pid=62522) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1267.329199] env[62522]: DEBUG oslo_vmware.rw_handles [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1267.329199] env[62522]: value = "resgroup-9" [ 1267.329199] env[62522]: _type = "ResourcePool" [ 1267.329199] env[62522]: }. {{(pid=62522) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1267.329521] env[62522]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-7cd15e4f-d02b-4e1f-82bf-60018f494948 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.346218] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-edb49903-4a3a-4c1c-9c1e-c7a5d6bfeb1c tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Volume attach. 
Driver type: vmdk {{(pid=62522) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1267.346463] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-edb49903-4a3a-4c1c-9c1e-c7a5d6bfeb1c tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489869', 'volume_id': 'da41f036-456a-409e-a359-6157800d323c', 'name': 'volume-da41f036-456a-409e-a359-6157800d323c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7406a1a4-a342-475b-ad02-6a29f7c487ee', 'attached_at': '', 'detached_at': '', 'volume_id': 'da41f036-456a-409e-a359-6157800d323c', 'serial': 'da41f036-456a-409e-a359-6157800d323c'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1267.347353] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca04c63-1826-4175-a34c-a9f7088cf3bf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.363943] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92024f37-9277-4e73-b039-3f794ffbb1b3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.366643] env[62522]: DEBUG oslo_vmware.rw_handles [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lease: (returnval){ [ 1267.366643] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5235f480-61dd-9c82-123b-ee6f70d2d45a" [ 1267.366643] env[62522]: _type = "HttpNfcLease" [ 1267.366643] env[62522]: } obtained for vApp import into resource pool (val){ [ 1267.366643] env[62522]: value = "resgroup-9" [ 1267.366643] env[62522]: _type = "ResourcePool" [ 1267.366643] env[62522]: }. {{(pid=62522) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1267.366980] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the lease: (returnval){ [ 1267.366980] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5235f480-61dd-9c82-123b-ee6f70d2d45a" [ 1267.366980] env[62522]: _type = "HttpNfcLease" [ 1267.366980] env[62522]: } to be ready. 
{{(pid=62522) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1267.382577] env[62522]: DEBUG oslo_concurrency.lockutils [req-422c4735-7ff5-4a75-8321-2e24782af563 req-8975ab64-12c7-49f5-9132-6574dddcceeb service nova] Releasing lock "refresh_cache-cb7a19f1-6093-47ee-bbbc-a75dd5423f32" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1267.391527] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-edb49903-4a3a-4c1c-9c1e-c7a5d6bfeb1c tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] volume-da41f036-456a-409e-a359-6157800d323c/volume-da41f036-456a-409e-a359-6157800d323c.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1267.392264] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-221e6e10-764a-4484-9a32-aed6a0718e47 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.406928] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1267.406928] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5235f480-61dd-9c82-123b-ee6f70d2d45a" [ 1267.406928] env[62522]: _type = "HttpNfcLease" [ 1267.406928] env[62522]: } is initializing. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1267.412431] env[62522]: DEBUG oslo_vmware.api [None req-edb49903-4a3a-4c1c-9c1e-c7a5d6bfeb1c tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1267.412431] env[62522]: value = "task-2416446" [ 1267.412431] env[62522]: _type = "Task" [ 1267.412431] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.422566] env[62522]: DEBUG oslo_vmware.api [None req-edb49903-4a3a-4c1c-9c1e-c7a5d6bfeb1c tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416446, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.470902] env[62522]: DEBUG nova.compute.manager [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Stashing vm_state: active {{(pid=62522) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1267.875397] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1267.875397] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5235f480-61dd-9c82-123b-ee6f70d2d45a" [ 1267.875397] env[62522]: _type = "HttpNfcLease" [ 1267.875397] env[62522]: } is ready. 
{{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1267.875665] env[62522]: DEBUG oslo_vmware.rw_handles [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1267.875665] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5235f480-61dd-9c82-123b-ee6f70d2d45a" [ 1267.875665] env[62522]: _type = "HttpNfcLease" [ 1267.875665] env[62522]: }. {{(pid=62522) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1267.876378] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28472e4e-f629-4979-8d39-cb64c1858d04 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.883306] env[62522]: DEBUG oslo_vmware.rw_handles [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52632b64-8e9e-e476-859b-a4583ff9e165/disk-0.vmdk from lease info. {{(pid=62522) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1267.883480] env[62522]: DEBUG oslo_vmware.rw_handles [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Creating HTTP connection to write to file with size = 31663104 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52632b64-8e9e-e476-859b-a4583ff9e165/disk-0.vmdk. {{(pid=62522) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1267.949385] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ef7e2a67-9319-4e11-bd00-0f7b1bdf9add {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.951471] env[62522]: DEBUG oslo_vmware.api [None req-edb49903-4a3a-4c1c-9c1e-c7a5d6bfeb1c tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416446, 'name': ReconfigVM_Task, 'duration_secs': 0.390269} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.953269] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-edb49903-4a3a-4c1c-9c1e-c7a5d6bfeb1c tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Reconfigured VM instance instance-0000006d to attach disk [datastore1] volume-da41f036-456a-409e-a359-6157800d323c/volume-da41f036-456a-409e-a359-6157800d323c.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1267.958775] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-967445c1-4f0d-49e9-be55-3036b38b9d78 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.977930] env[62522]: DEBUG oslo_vmware.api [None req-edb49903-4a3a-4c1c-9c1e-c7a5d6bfeb1c tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1267.977930] env[62522]: value = "task-2416447" [ 1267.977930] env[62522]: _type = "Task" [ 1267.977930] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.986699] env[62522]: DEBUG oslo_vmware.api [None req-edb49903-4a3a-4c1c-9c1e-c7a5d6bfeb1c tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416447, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.989427] env[62522]: DEBUG oslo_concurrency.lockutils [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1267.989730] env[62522]: DEBUG oslo_concurrency.lockutils [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1268.304292] env[62522]: DEBUG oslo_concurrency.lockutils [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "da11bae6-484b-455e-9462-6f5143d2a9a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1268.304556] env[62522]: DEBUG oslo_concurrency.lockutils [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "da11bae6-484b-455e-9462-6f5143d2a9a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1268.488627] env[62522]: DEBUG 
oslo_vmware.api [None req-edb49903-4a3a-4c1c-9c1e-c7a5d6bfeb1c tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416447, 'name': ReconfigVM_Task, 'duration_secs': 0.137543} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.488955] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-edb49903-4a3a-4c1c-9c1e-c7a5d6bfeb1c tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489869', 'volume_id': 'da41f036-456a-409e-a359-6157800d323c', 'name': 'volume-da41f036-456a-409e-a359-6157800d323c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7406a1a4-a342-475b-ad02-6a29f7c487ee', 'attached_at': '', 'detached_at': '', 'volume_id': 'da41f036-456a-409e-a359-6157800d323c', 'serial': 'da41f036-456a-409e-a359-6157800d323c'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1268.494352] env[62522]: INFO nova.compute.claims [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1268.807061] env[62522]: DEBUG nova.compute.manager [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1269.000551] env[62522]: INFO nova.compute.resource_tracker [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Updating resource usage from migration 3826051c-749f-40bd-a8e3-76ba379c3bca [ 1269.047594] env[62522]: DEBUG oslo_vmware.rw_handles [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Completed reading data from the image iterator. {{(pid=62522) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1269.047823] env[62522]: DEBUG oslo_vmware.rw_handles [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52632b64-8e9e-e476-859b-a4583ff9e165/disk-0.vmdk. 
{{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1269.048781] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d66feeb2-b0a2-493b-9908-474e680eeb41 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.059427] env[62522]: DEBUG oslo_vmware.rw_handles [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52632b64-8e9e-e476-859b-a4583ff9e165/disk-0.vmdk is in state: ready. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1269.059606] env[62522]: DEBUG oslo_vmware.rw_handles [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52632b64-8e9e-e476-859b-a4583ff9e165/disk-0.vmdk. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1269.059835] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-0face49e-c07e-4ce0-9482-2fe1c6dac0d9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.135796] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeadf741-03b3-4e76-91c0-fc312b1f3ee5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.144817] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-933047cb-2c1f-40cf-8010-b92d9df49dcf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.177387] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d59fa1d7-dcfd-427c-8b75-f0837d748620 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.185311] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63386305-9149-412c-96fc-d54257479be0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.198803] env[62522]: DEBUG nova.compute.provider_tree [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1269.264913] env[62522]: DEBUG oslo_vmware.rw_handles [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52632b64-8e9e-e476-859b-a4583ff9e165/disk-0.vmdk. 
{{(pid=62522) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1269.265331] env[62522]: INFO nova.virt.vmwareapi.images [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Downloaded image file data 72a425d1-613f-40dd-aa4b-ada95eb89bb3 [ 1269.266485] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe24696-f7dd-4f4f-93f3-18737c3bad7a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.283331] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cd4ca078-7d47-47a2-9c7a-df1c6e3cb7f8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.308290] env[62522]: INFO nova.virt.vmwareapi.images [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] The imported VM was unregistered [ 1269.310494] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Caching image {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1269.310750] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Creating directory with path [datastore1] devstack-image-cache_base/72a425d1-613f-40dd-aa4b-ada95eb89bb3 {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1269.313397] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d63caf54-7296-4064-991b-e0da4b71d6d5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.325224] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Created directory with path [datastore1] devstack-image-cache_base/72a425d1-613f-40dd-aa4b-ada95eb89bb3 {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1269.325443] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_6c8ae5b9-7475-427b-9a1d-bc308a8ea840/OSTACK_IMG_6c8ae5b9-7475-427b-9a1d-bc308a8ea840.vmdk to [datastore1] devstack-image-cache_base/72a425d1-613f-40dd-aa4b-ada95eb89bb3/72a425d1-613f-40dd-aa4b-ada95eb89bb3.vmdk. 
{{(pid=62522) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1269.325686] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-f15a3612-1816-4e99-bb99-72c3f91c5959 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.329484] env[62522]: DEBUG oslo_concurrency.lockutils [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1269.332689] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1269.332689] env[62522]: value = "task-2416449" [ 1269.332689] env[62522]: _type = "Task" [ 1269.332689] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.339865] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416449, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.530553] env[62522]: DEBUG nova.objects.instance [None req-edb49903-4a3a-4c1c-9c1e-c7a5d6bfeb1c tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lazy-loading 'flavor' on Instance uuid 7406a1a4-a342-475b-ad02-6a29f7c487ee {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1269.702638] env[62522]: DEBUG nova.scheduler.client.report [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1269.844347] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416449, 'name': MoveVirtualDisk_Task} progress is 24%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.036275] env[62522]: DEBUG oslo_concurrency.lockutils [None req-edb49903-4a3a-4c1c-9c1e-c7a5d6bfeb1c tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "7406a1a4-a342-475b-ad02-6a29f7c487ee" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.301s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1270.208057] env[62522]: DEBUG oslo_concurrency.lockutils [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.218s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1270.208310] env[62522]: INFO nova.compute.manager [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Migrating [ 1270.216496] env[62522]: DEBUG oslo_concurrency.lockutils [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.887s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1270.218278] env[62522]: INFO nova.compute.claims [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1270.343575] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416449, 'name': MoveVirtualDisk_Task} progress is 46%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.729551] env[62522]: DEBUG oslo_concurrency.lockutils [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "refresh_cache-961df2ff-bd02-45af-afb8-14a99cfea1de" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1270.729777] env[62522]: DEBUG oslo_concurrency.lockutils [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired lock "refresh_cache-961df2ff-bd02-45af-afb8-14a99cfea1de" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1270.729926] env[62522]: DEBUG nova.network.neutron [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1270.843872] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416449, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.904651] env[62522]: INFO nova.compute.manager [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Rebuilding instance [ 1270.988463] env[62522]: DEBUG nova.compute.manager [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1270.989552] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-120bdb52-060e-4ce2-9991-144fbb62da07 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.347649] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416449, 'name': MoveVirtualDisk_Task} progress is 91%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.380338] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-406f9e92-bf91-424f-9524-0771452a3548 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.390500] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9c1c9c0-6323-4ab4-bc70-85e4e2eb765d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.424015] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb5ddd8-0ded-4a45-b8ac-1bee5d73573c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.432025] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-539fd47e-6695-4648-88c7-9f7bd8848feb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.446302] env[62522]: DEBUG nova.compute.provider_tree [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1271.499413] env[62522]: DEBUG nova.network.neutron [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Updating instance_info_cache with network_info: [{"id": "a15f47bb-6d26-4faf-91e1-6ce27453f7bf", "address": "fa:16:3e:46:c2:20", "network": {"id": "21d0ca22-5509-4f9b-b167-f9fde2dac808", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-547933771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a06421250694a98b13ff34ad816dc75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa15f47bb-6d", "ovs_interfaceid": "a15f47bb-6d26-4faf-91e1-6ce27453f7bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1271.844238] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416449, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.264045} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.844475] env[62522]: INFO nova.virt.vmwareapi.ds_util [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_6c8ae5b9-7475-427b-9a1d-bc308a8ea840/OSTACK_IMG_6c8ae5b9-7475-427b-9a1d-bc308a8ea840.vmdk to [datastore1] devstack-image-cache_base/72a425d1-613f-40dd-aa4b-ada95eb89bb3/72a425d1-613f-40dd-aa4b-ada95eb89bb3.vmdk. [ 1271.844672] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Cleaning up location [datastore1] OSTACK_IMG_6c8ae5b9-7475-427b-9a1d-bc308a8ea840 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1271.844840] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_6c8ae5b9-7475-427b-9a1d-bc308a8ea840 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1271.845129] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d07de52e-ac87-42d7-b6c8-0899e43ecec8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.852219] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1271.852219] env[62522]: value = "task-2416450" [ 1271.852219] env[62522]: _type = "Task" [ 1271.852219] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.859371] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416450, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.949663] env[62522]: DEBUG nova.scheduler.client.report [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1272.003834] env[62522]: DEBUG oslo_concurrency.lockutils [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Releasing lock "refresh_cache-961df2ff-bd02-45af-afb8-14a99cfea1de" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1272.005400] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1272.005835] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b6d2d682-f327-420f-a88c-1bc3fabccdf3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.013045] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1272.013045] env[62522]: value = "task-2416451" [ 1272.013045] env[62522]: _type = "Task" [ 1272.013045] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.024011] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416451, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.361383] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416450, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.032624} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.361798] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1272.361798] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Releasing lock "[datastore1] devstack-image-cache_base/72a425d1-613f-40dd-aa4b-ada95eb89bb3/72a425d1-613f-40dd-aa4b-ada95eb89bb3.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1272.362046] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/72a425d1-613f-40dd-aa4b-ada95eb89bb3/72a425d1-613f-40dd-aa4b-ada95eb89bb3.vmdk to [datastore1] cb7a19f1-6093-47ee-bbbc-a75dd5423f32/cb7a19f1-6093-47ee-bbbc-a75dd5423f32.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1272.362297] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-93b4c4cd-856b-480c-8975-8d621dcef444 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.369504] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1272.369504] env[62522]: value = "task-2416452" [ 1272.369504] env[62522]: _type = "Task" [ 1272.369504] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.377975] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416452, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.455388] env[62522]: DEBUG oslo_concurrency.lockutils [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.239s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1272.456126] env[62522]: DEBUG nova.compute.manager [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1272.522580] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416451, 'name': PowerOffVM_Task, 'duration_secs': 0.431048} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.522840] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1272.581142] env[62522]: INFO nova.compute.manager [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Detaching volume da41f036-456a-409e-a359-6157800d323c [ 1272.614889] env[62522]: INFO nova.virt.block_device [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Attempting to driver detach volume da41f036-456a-409e-a359-6157800d323c from mountpoint /dev/sdb [ 1272.615151] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Volume detach. Driver type: vmdk {{(pid=62522) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1272.615340] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489869', 'volume_id': 'da41f036-456a-409e-a359-6157800d323c', 'name': 'volume-da41f036-456a-409e-a359-6157800d323c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7406a1a4-a342-475b-ad02-6a29f7c487ee', 'attached_at': '', 'detached_at': '', 'volume_id': 'da41f036-456a-409e-a359-6157800d323c', 'serial': 'da41f036-456a-409e-a359-6157800d323c'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1272.616225] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03c94120-2efd-4350-8b60-7e31eb4112ba {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.637599] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d26f82c-3842-4d49-96bb-44165cc4ee98 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.644450] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a635c024-f52e-4b45-b352-471e0aa19438 {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.664126] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10440250-3c29-42ea-a527-9e367359bb52 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.678721] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] The volume has not been displaced from its original location: [datastore1] volume-da41f036-456a-409e-a359-6157800d323c/volume-da41f036-456a-409e-a359-6157800d323c.vmdk. No consolidation needed. {{(pid=62522) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1272.683905] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Reconfiguring VM instance instance-0000006d to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1272.684212] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2c82467-675c-4ece-9c2e-4bbb74c98a98 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.702263] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1272.702263] env[62522]: value = "task-2416453" [ 1272.702263] env[62522]: _type = "Task" [ 1272.702263] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.712104] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416453, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.880531] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416452, 'name': CopyVirtualDisk_Task} progress is 12%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.962729] env[62522]: DEBUG nova.compute.utils [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1272.964550] env[62522]: DEBUG nova.compute.manager [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1272.964690] env[62522]: DEBUG nova.network.neutron [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1273.006626] env[62522]: DEBUG nova.policy [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c7a901dd2575462f9369f3d8819fb86d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82346c440c3343a0a5c233a48203a13c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1273.212333] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416453, 'name': ReconfigVM_Task, 'duration_secs': 0.201869} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.212631] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Reconfigured VM instance instance-0000006d to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1273.217479] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46fa5245-8253-415c-a225-65ca32cac969 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.233017] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1273.233017] env[62522]: value = "task-2416454" [ 1273.233017] env[62522]: _type = "Task" [ 1273.233017] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.241422] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416454, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.380206] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416452, 'name': CopyVirtualDisk_Task} progress is 32%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.383084] env[62522]: DEBUG nova.network.neutron [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Successfully created port: d2a62d4f-3bdc-4367-8694-9ba47bdfd799 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1273.467651] env[62522]: DEBUG nova.compute.manager [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1273.519258] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f73b1b1-9d4f-4bcc-a557-980e0cbc13eb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.537891] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Updating instance '961df2ff-bd02-45af-afb8-14a99cfea1de' progress to 0 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1273.744457] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416454, 'name': ReconfigVM_Task, 'duration_secs': 0.190738} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.744788] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489869', 'volume_id': 'da41f036-456a-409e-a359-6157800d323c', 'name': 'volume-da41f036-456a-409e-a359-6157800d323c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7406a1a4-a342-475b-ad02-6a29f7c487ee', 'attached_at': '', 'detached_at': '', 'volume_id': 'da41f036-456a-409e-a359-6157800d323c', 'serial': 'da41f036-456a-409e-a359-6157800d323c'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1273.882610] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416452, 'name': CopyVirtualDisk_Task} progress is 54%. 
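The recurring "Waiting for the task", "progress is N%" and "completed successfully" entries are produced by oslo.vmware's task polling around vCenter tasks such as ReconfigVM_Task and CopyVirtualDisk_Task. Below is a minimal sketch of that poll-until-terminal pattern; get_task_info, the state strings and the poll interval are assumptions standing in for the real oslo.vmware API, not its actual signature.

```python
# Sketch of the wait_for_task polling pattern visible in the entries above.
import time

def wait_for_task(session, task_ref, poll_interval=0.5):
    """Poll a vCenter task until it succeeds or errors out."""
    while True:
        info = session.get_task_info(task_ref)      # hypothetical helper
        if info.state == "success":
            return info.result                      # e.g. the new VM ref
        if info.state == "error":
            raise RuntimeError(f"{task_ref} failed: {info.error}")
        # still queued/running: report progress and try again
        print(f"Task {task_ref} progress is {info.progress}%")
        time.sleep(poll_interval)
```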
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.043983] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1274.044400] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-902a328f-214c-4298-875c-cb7b534fd935 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.052801] env[62522]: DEBUG oslo_vmware.api [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1274.052801] env[62522]: value = "task-2416455" [ 1274.052801] env[62522]: _type = "Task" [ 1274.052801] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.062539] env[62522]: DEBUG oslo_vmware.api [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416455, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.382988] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416452, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.477676] env[62522]: DEBUG nova.compute.manager [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1274.511133] env[62522]: DEBUG nova.virt.hardware [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1274.511522] env[62522]: DEBUG nova.virt.hardware [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1274.511809] env[62522]: DEBUG nova.virt.hardware [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1274.512042] env[62522]: DEBUG nova.virt.hardware [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1274.512664] env[62522]: DEBUG nova.virt.hardware [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1274.512664] env[62522]: DEBUG nova.virt.hardware [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1274.512853] env[62522]: DEBUG nova.virt.hardware [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1274.513084] env[62522]: DEBUG nova.virt.hardware [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1274.513405] env[62522]: DEBUG 
nova.virt.hardware [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1274.513697] env[62522]: DEBUG nova.virt.hardware [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1274.513903] env[62522]: DEBUG nova.virt.hardware [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1274.515634] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5687e63-ccca-42d3-a6ed-ff78672bf47e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.526173] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7466351c-9c10-4bd2-b0f9-54e9fe764b91 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.565365] env[62522]: DEBUG oslo_vmware.api [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416455, 'name': PowerOffVM_Task, 'duration_secs': 0.186524} completed successfully. 
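The nova.virt.hardware entries above enumerate CPU topologies for a 1-vCPU flavor with effectively unlimited socket/core/thread caps and end up with the single candidate 1:1:1. A simplified sketch of that enumeration is shown below; Nova's real _get_possible_cpu_topologies additionally applies flavor and image preferences and sorting, so this is only the factorisation step.

```python
# Simplified sketch of enumerating CPU topologies for a vCPU count,
# mirroring the "Build topologies for 1 vcpu(s)" entries above.
from dataclasses import dataclass

@dataclass(frozen=True)
class Topology:
    sockets: int
    cores: int
    threads: int

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    """Yield every sockets*cores*threads factorisation of `vcpus`
    that respects the given upper limits."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        remaining = vcpus // sockets
        for cores in range(1, min(remaining, max_cores) + 1):
            if remaining % cores:
                continue
            threads = remaining // cores
            if threads <= max_threads:
                yield Topology(sockets, cores, threads)

# A 1-vCPU flavor with 65536/65536/65536 limits has exactly one candidate:
print(list(possible_topologies(1, 65536, 65536, 65536)))
# [Topology(sockets=1, cores=1, threads=1)]
```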
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.565771] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1274.565999] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Updating instance '961df2ff-bd02-45af-afb8-14a99cfea1de' progress to 17 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1274.797488] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1274.797488] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2cc539a1-e138-41a7-868c-f98a904bb348 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.803850] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1274.803850] env[62522]: value = "task-2416456" [ 1274.803850] env[62522]: _type = "Task" [ 1274.803850] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.813199] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] VM already powered off {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1274.813424] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Volume detach. 
Driver type: vmdk {{(pid=62522) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1274.813621] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489869', 'volume_id': 'da41f036-456a-409e-a359-6157800d323c', 'name': 'volume-da41f036-456a-409e-a359-6157800d323c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7406a1a4-a342-475b-ad02-6a29f7c487ee', 'attached_at': '', 'detached_at': '', 'volume_id': 'da41f036-456a-409e-a359-6157800d323c', 'serial': 'da41f036-456a-409e-a359-6157800d323c'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1274.814408] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58352c6d-2eb2-4fa0-af40-e08404970915 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.835293] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f51d05-8d35-40e6-a250-53e99be75d7a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.838719] env[62522]: DEBUG nova.compute.manager [req-c7730f4d-afcb-4627-a47c-10f442b50019 req-eaa53890-0f41-4b53-bc88-23f725bcd312 service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Received event network-vif-plugged-d2a62d4f-3bdc-4367-8694-9ba47bdfd799 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1274.838926] env[62522]: DEBUG oslo_concurrency.lockutils [req-c7730f4d-afcb-4627-a47c-10f442b50019 req-eaa53890-0f41-4b53-bc88-23f725bcd312 service nova] Acquiring lock "da11bae6-484b-455e-9462-6f5143d2a9a9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1274.839144] env[62522]: DEBUG oslo_concurrency.lockutils [req-c7730f4d-afcb-4627-a47c-10f442b50019 req-eaa53890-0f41-4b53-bc88-23f725bcd312 service nova] Lock "da11bae6-484b-455e-9462-6f5143d2a9a9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1274.839314] env[62522]: DEBUG oslo_concurrency.lockutils [req-c7730f4d-afcb-4627-a47c-10f442b50019 req-eaa53890-0f41-4b53-bc88-23f725bcd312 service nova] Lock "da11bae6-484b-455e-9462-6f5143d2a9a9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1274.839483] env[62522]: DEBUG nova.compute.manager [req-c7730f4d-afcb-4627-a47c-10f442b50019 req-eaa53890-0f41-4b53-bc88-23f725bcd312 service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] No waiting events found dispatching network-vif-plugged-d2a62d4f-3bdc-4367-8694-9ba47bdfd799 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1274.839648] env[62522]: WARNING nova.compute.manager 
[req-c7730f4d-afcb-4627-a47c-10f442b50019 req-eaa53890-0f41-4b53-bc88-23f725bcd312 service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Received unexpected event network-vif-plugged-d2a62d4f-3bdc-4367-8694-9ba47bdfd799 for instance with vm_state building and task_state spawning. [ 1274.843599] env[62522]: WARNING nova.virt.vmwareapi.driver [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1274.843840] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1274.844582] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d95c99c1-63ec-46a9-9875-7dc3e3dda2c9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.850405] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1274.850618] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-38d5e96e-aa3e-4704-ae25-e443e4ca0c9a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.883190] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416452, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.448496} completed successfully. 
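The network-vif-plugged entries above show the compute manager popping a pending instance event and, because nothing was waiting for it yet (the instance is still building), logging the "Received unexpected event" warning. A toy sketch of that dispatch pattern follows; the InstanceEvents class and waiter object here are simplified stand-ins, not Nova's implementation.

```python
# Toy sketch of the pop_instance_event pattern: an external event either
# wakes a registered waiter or is logged as unexpected.
import logging

LOG = logging.getLogger(__name__)

class InstanceEvents:
    def __init__(self):
        # {(instance_uuid, event_name): waiter}; waiters are registered by
        # code blocking until e.g. network-vif-plugged arrives.
        self._waiters = {}

    def prepare(self, instance_uuid, event_name, waiter):
        self._waiters[(instance_uuid, event_name)] = waiter

    def pop_instance_event(self, instance_uuid, event_name):
        return self._waiters.pop((instance_uuid, event_name), None)

def handle_external_event(events, instance_uuid, event_name):
    waiter = events.pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        LOG.warning("Received unexpected event %s for instance %s",
                    event_name, instance_uuid)
    else:
        waiter.set()  # threading.Event-like: unblock whoever was waiting
```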
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.883539] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/72a425d1-613f-40dd-aa4b-ada95eb89bb3/72a425d1-613f-40dd-aa4b-ada95eb89bb3.vmdk to [datastore1] cb7a19f1-6093-47ee-bbbc-a75dd5423f32/cb7a19f1-6093-47ee-bbbc-a75dd5423f32.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1274.884577] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6dae1fe-1dba-44ef-b3ad-854f90208185 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.906073] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] cb7a19f1-6093-47ee-bbbc-a75dd5423f32/cb7a19f1-6093-47ee-bbbc-a75dd5423f32.vmdk or device None with type streamOptimized {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1274.906386] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74cfb1a2-daff-41c2-8532-2b46689a1293 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.922064] env[62522]: DEBUG nova.network.neutron [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Successfully updated port: d2a62d4f-3bdc-4367-8694-9ba47bdfd799 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1274.923833] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1274.923970] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1274.924090] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Deleting the datastore file [datastore1] 7406a1a4-a342-475b-ad02-6a29f7c487ee {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1274.924873] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1c73447-9ac1-4a65-864c-ad124a7ce87b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.929763] env[62522]: DEBUG oslo_vmware.api [None 
req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1274.929763] env[62522]: value = "task-2416458" [ 1274.929763] env[62522]: _type = "Task" [ 1274.929763] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.934299] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1274.934299] env[62522]: value = "task-2416459" [ 1274.934299] env[62522]: _type = "Task" [ 1274.934299] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.943767] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416458, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.948578] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416459, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.074047] env[62522]: DEBUG nova.virt.hardware [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:04Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1275.074338] env[62522]: DEBUG nova.virt.hardware [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1275.074646] env[62522]: DEBUG nova.virt.hardware [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1275.074762] env[62522]: DEBUG nova.virt.hardware [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1275.074839] env[62522]: DEBUG nova.virt.hardware [None req-945da448-c78e-41e5-b117-0efeba5d387e 
tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1275.074981] env[62522]: DEBUG nova.virt.hardware [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1275.075270] env[62522]: DEBUG nova.virt.hardware [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1275.075402] env[62522]: DEBUG nova.virt.hardware [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1275.075594] env[62522]: DEBUG nova.virt.hardware [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1275.075766] env[62522]: DEBUG nova.virt.hardware [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1275.075942] env[62522]: DEBUG nova.virt.hardware [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1275.080918] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed636f61-8810-4db5-ab30-095c5eb96f89 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.097273] env[62522]: DEBUG oslo_vmware.api [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1275.097273] env[62522]: value = "task-2416460" [ 1275.097273] env[62522]: _type = "Task" [ 1275.097273] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.105660] env[62522]: DEBUG oslo_vmware.api [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416460, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.425625] env[62522]: DEBUG oslo_concurrency.lockutils [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "refresh_cache-da11bae6-484b-455e-9462-6f5143d2a9a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1275.425950] env[62522]: DEBUG oslo_concurrency.lockutils [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquired lock "refresh_cache-da11bae6-484b-455e-9462-6f5143d2a9a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1275.425950] env[62522]: DEBUG nova.network.neutron [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1275.441353] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416458, 'name': ReconfigVM_Task, 'duration_secs': 0.295773} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.442081] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Reconfigured VM instance instance-00000069 to attach disk [datastore1] cb7a19f1-6093-47ee-bbbc-a75dd5423f32/cb7a19f1-6093-47ee-bbbc-a75dd5423f32.vmdk or device None with type streamOptimized {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1275.442904] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-213b786f-8e96-4fa5-b62c-9ed0768b2781 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.447918] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416459, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164275} completed successfully. 
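The Acquiring/Acquired/Released entries around the "refresh_cache-<instance uuid>" lock above come from oslo.concurrency's lockutils, which serializes rebuilds of an instance's network info cache. A minimal sketch of guarding such a refresh with that lock is shown below; the refresh body is a placeholder callable, not Nova's code.

```python
# Minimal sketch: serialize refreshes of an instance's network info cache
# with an oslo.concurrency lock, as in the "refresh_cache-..." entries above.
from oslo_concurrency import lockutils

def refresh_network_cache(instance_uuid, rebuild_cache):
    # The lock() context manager logs the acquire/release much like the
    # lockutils.py entries in this log.
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        return rebuild_cache(instance_uuid)  # placeholder callable
```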
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.448503] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1275.448712] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1275.448924] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1275.453323] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1275.453323] env[62522]: value = "task-2416461" [ 1275.453323] env[62522]: _type = "Task" [ 1275.453323] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.461276] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416461, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.607361] env[62522]: DEBUG oslo_vmware.api [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416460, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.955688] env[62522]: INFO nova.virt.block_device [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Booting with volume da41f036-456a-409e-a359-6157800d323c at /dev/sdb [ 1275.960229] env[62522]: DEBUG nova.network.neutron [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1275.969457] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416461, 'name': Rename_Task, 'duration_secs': 0.147479} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.969594] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1275.969783] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c3fab4c2-79de-42dd-bc7b-8123d3741ef2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.976035] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1275.976035] env[62522]: value = "task-2416462" [ 1275.976035] env[62522]: _type = "Task" [ 1275.976035] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.985539] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416462, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.990022] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-de81c06e-cdcb-41e5-a0b8-86a696bf2c34 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.997623] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0654d4f6-628f-4913-b4d9-0dc86de1a9f1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.027750] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f96dba95-119f-4156-9fa0-dd6214172d53 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.035490] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b473a2-bb45-4912-a980-9e59d0f2dfcf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.066228] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-806a9ca9-684f-42b6-8c25-188ba4c31491 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.072259] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef3bc806-2090-4d37-a9ca-fa4d33fd400e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.085244] env[62522]: DEBUG nova.virt.block_device [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Updating existing volume attachment record: 653ba21b-af8f-47b7-b267-5ecc6c524b1d {{(pid=62522) _volume_attach 
/opt/stack/nova/nova/virt/block_device.py:666}} [ 1276.107094] env[62522]: DEBUG oslo_vmware.api [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416460, 'name': ReconfigVM_Task, 'duration_secs': 0.531249} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.107432] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Updating instance '961df2ff-bd02-45af-afb8-14a99cfea1de' progress to 33 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1276.124499] env[62522]: DEBUG nova.network.neutron [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Updating instance_info_cache with network_info: [{"id": "d2a62d4f-3bdc-4367-8694-9ba47bdfd799", "address": "fa:16:3e:73:de:e5", "network": {"id": "2a4f6f01-ad28-4f8f-b835-ea86e1791f49", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1577320995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82346c440c3343a0a5c233a48203a13c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2a62d4f-3b", "ovs_interfaceid": "d2a62d4f-3bdc-4367-8694-9ba47bdfd799", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1276.486050] env[62522]: DEBUG oslo_vmware.api [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416462, 'name': PowerOnVM_Task, 'duration_secs': 0.43487} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.486050] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1276.593621] env[62522]: DEBUG nova.compute.manager [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1276.594606] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f5938b-e618-4385-bd47-493d8ebc5137 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.614396] env[62522]: DEBUG nova.virt.hardware [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1276.614625] env[62522]: DEBUG nova.virt.hardware [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1276.614782] env[62522]: DEBUG nova.virt.hardware [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1276.614964] env[62522]: DEBUG nova.virt.hardware [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1276.615220] env[62522]: DEBUG nova.virt.hardware [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1276.615413] env[62522]: DEBUG nova.virt.hardware [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1276.615655] env[62522]: DEBUG nova.virt.hardware [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1276.615863] env[62522]: DEBUG nova.virt.hardware [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1276.616084] env[62522]: DEBUG nova.virt.hardware [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1276.616282] env[62522]: DEBUG nova.virt.hardware [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1276.616520] env[62522]: DEBUG nova.virt.hardware [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1276.622194] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1276.622386] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa0931a1-eb48-4c4a-bb82-00fb817338f1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.637517] env[62522]: DEBUG oslo_concurrency.lockutils [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Releasing lock "refresh_cache-da11bae6-484b-455e-9462-6f5143d2a9a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1276.637799] env[62522]: DEBUG nova.compute.manager [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Instance network_info: |[{"id": "d2a62d4f-3bdc-4367-8694-9ba47bdfd799", "address": "fa:16:3e:73:de:e5", "network": {"id": "2a4f6f01-ad28-4f8f-b835-ea86e1791f49", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1577320995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": 
{}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82346c440c3343a0a5c233a48203a13c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2a62d4f-3b", "ovs_interfaceid": "d2a62d4f-3bdc-4367-8694-9ba47bdfd799", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1276.638178] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:73:de:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '193994c7-8e1b-4f25-a4a4-d0563845eb28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd2a62d4f-3bdc-4367-8694-9ba47bdfd799', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1276.645445] env[62522]: DEBUG oslo.service.loopingcall [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1276.646735] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1276.646952] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c3d7c390-b015-4c9b-8d74-eb14d8d77382 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.662581] env[62522]: DEBUG oslo_vmware.api [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1276.662581] env[62522]: value = "task-2416463" [ 1276.662581] env[62522]: _type = "Task" [ 1276.662581] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.668244] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1276.668244] env[62522]: value = "task-2416464" [ 1276.668244] env[62522]: _type = "Task" [ 1276.668244] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.671438] env[62522]: DEBUG oslo_vmware.api [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416463, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.678696] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416464, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.855095] env[62522]: DEBUG nova.compute.manager [req-28ac9d97-fde2-47fc-a498-ebbc97745fc8 req-0a172b2f-8da1-4c71-991a-a00d24b21309 service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Received event network-changed-d2a62d4f-3bdc-4367-8694-9ba47bdfd799 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1276.855285] env[62522]: DEBUG nova.compute.manager [req-28ac9d97-fde2-47fc-a498-ebbc97745fc8 req-0a172b2f-8da1-4c71-991a-a00d24b21309 service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Refreshing instance network info cache due to event network-changed-d2a62d4f-3bdc-4367-8694-9ba47bdfd799. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1276.855533] env[62522]: DEBUG oslo_concurrency.lockutils [req-28ac9d97-fde2-47fc-a498-ebbc97745fc8 req-0a172b2f-8da1-4c71-991a-a00d24b21309 service nova] Acquiring lock "refresh_cache-da11bae6-484b-455e-9462-6f5143d2a9a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1276.855716] env[62522]: DEBUG oslo_concurrency.lockutils [req-28ac9d97-fde2-47fc-a498-ebbc97745fc8 req-0a172b2f-8da1-4c71-991a-a00d24b21309 service nova] Acquired lock "refresh_cache-da11bae6-484b-455e-9462-6f5143d2a9a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1276.855911] env[62522]: DEBUG nova.network.neutron [req-28ac9d97-fde2-47fc-a498-ebbc97745fc8 req-0a172b2f-8da1-4c71-991a-a00d24b21309 service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Refreshing network info cache for port d2a62d4f-3bdc-4367-8694-9ba47bdfd799 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1277.110071] env[62522]: DEBUG oslo_concurrency.lockutils [None req-d4e3dcba-19f2-4c34-87b1-82f58f4335fd tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 19.022s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.174165] env[62522]: DEBUG oslo_vmware.api [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416463, 'name': ReconfigVM_Task, 'duration_secs': 0.299714} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.177151] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1277.177897] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70b8e30e-a303-4e55-b090-e86a2af1f832 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.186903] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416464, 'name': CreateVM_Task, 'duration_secs': 0.308082} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.196803] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1277.204553] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] 961df2ff-bd02-45af-afb8-14a99cfea1de/961df2ff-bd02-45af-afb8-14a99cfea1de.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1277.205197] env[62522]: DEBUG oslo_concurrency.lockutils [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1277.205356] env[62522]: DEBUG oslo_concurrency.lockutils [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1277.205663] env[62522]: DEBUG oslo_concurrency.lockutils [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1277.206108] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4bb165e0-286d-4498-b30d-4f5e7fa3c000 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.218267] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ce0a215-700b-4f84-965b-3e80ea26f851 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1277.223954] env[62522]: DEBUG oslo_vmware.api [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1277.223954] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]523dd7a3-3c67-0c82-3523-ad2b4778cf7b" [ 1277.223954] env[62522]: _type = "Task" [ 1277.223954] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.225326] env[62522]: DEBUG oslo_vmware.api [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1277.225326] env[62522]: value = "task-2416465" [ 1277.225326] env[62522]: _type = "Task" [ 1277.225326] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.235019] env[62522]: DEBUG oslo_vmware.api [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]523dd7a3-3c67-0c82-3523-ad2b4778cf7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.237857] env[62522]: DEBUG oslo_vmware.api [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416465, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.738559] env[62522]: DEBUG oslo_vmware.api [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]523dd7a3-3c67-0c82-3523-ad2b4778cf7b, 'name': SearchDatastore_Task, 'duration_secs': 0.02036} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.741597] env[62522]: DEBUG oslo_concurrency.lockutils [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1277.742086] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1277.742387] env[62522]: DEBUG oslo_concurrency.lockutils [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1277.742583] env[62522]: DEBUG oslo_concurrency.lockutils [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1277.742805] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1277.743125] env[62522]: DEBUG oslo_vmware.api [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416465, 'name': ReconfigVM_Task, 'duration_secs': 0.257297} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.743362] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e828f6d9-5fc5-4606-9388-40202182e9cf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.745146] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Reconfigured VM instance instance-00000070 to attach disk [datastore2] 961df2ff-bd02-45af-afb8-14a99cfea1de/961df2ff-bd02-45af-afb8-14a99cfea1de.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1277.745482] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Updating instance '961df2ff-bd02-45af-afb8-14a99cfea1de' progress to 50 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1277.755613] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1277.755849] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1277.756603] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-904b7be3-e098-4d41-873a-8fcc53130bd1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.761563] env[62522]: DEBUG oslo_vmware.api [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1277.761563] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5253491f-12f2-9aa8-2883-e66fa808e5f1" [ 1277.761563] env[62522]: _type = "Task" [ 1277.761563] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.769751] env[62522]: DEBUG oslo_vmware.api [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5253491f-12f2-9aa8-2883-e66fa808e5f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.933800] env[62522]: DEBUG nova.network.neutron [req-28ac9d97-fde2-47fc-a498-ebbc97745fc8 req-0a172b2f-8da1-4c71-991a-a00d24b21309 service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Updated VIF entry in instance network info cache for port d2a62d4f-3bdc-4367-8694-9ba47bdfd799. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1277.934217] env[62522]: DEBUG nova.network.neutron [req-28ac9d97-fde2-47fc-a498-ebbc97745fc8 req-0a172b2f-8da1-4c71-991a-a00d24b21309 service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Updating instance_info_cache with network_info: [{"id": "d2a62d4f-3bdc-4367-8694-9ba47bdfd799", "address": "fa:16:3e:73:de:e5", "network": {"id": "2a4f6f01-ad28-4f8f-b835-ea86e1791f49", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1577320995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82346c440c3343a0a5c233a48203a13c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2a62d4f-3b", "ovs_interfaceid": "d2a62d4f-3bdc-4367-8694-9ba47bdfd799", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1278.213591] env[62522]: DEBUG nova.virt.hardware [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1278.213842] env[62522]: DEBUG nova.virt.hardware [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1278.214018] env[62522]: DEBUG nova.virt.hardware [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1278.214296] env[62522]: DEBUG nova.virt.hardware [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Flavor pref 0:0:0 {{(pid=62522) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1278.214457] env[62522]: DEBUG nova.virt.hardware [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1278.214617] env[62522]: DEBUG nova.virt.hardware [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1278.214822] env[62522]: DEBUG nova.virt.hardware [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1278.214983] env[62522]: DEBUG nova.virt.hardware [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1278.215173] env[62522]: DEBUG nova.virt.hardware [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1278.215339] env[62522]: DEBUG nova.virt.hardware [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1278.215513] env[62522]: DEBUG nova.virt.hardware [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1278.216439] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b44c2a4-7174-43d5-a829-1c188bc7e86d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.224188] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ecf5d8a-9f5d-49eb-a078-af2638001e18 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.237117] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:27:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd7b5f1ef-d4b9-4ec3-b047-17e4cb349d25', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': '1f2dff01-fe9d-46ea-af42-ec9d20c5ac2e', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1278.244258] env[62522]: DEBUG oslo.service.loopingcall [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1278.244456] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1278.244648] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-85ab1a14-7d6c-4558-be46-fe79b41d2422 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.259407] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07beecba-674f-4fe8-857a-f997e67192bd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.268359] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1278.268359] env[62522]: value = "task-2416466" [ 1278.268359] env[62522]: _type = "Task" [ 1278.268359] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.284701] env[62522]: DEBUG oslo_vmware.api [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5253491f-12f2-9aa8-2883-e66fa808e5f1, 'name': SearchDatastore_Task, 'duration_secs': 0.009071} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.288362] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15314ce2-03fc-4f65-bf94-a4a6a9e0889c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.290530] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7db5db53-5159-427d-973b-6012d193d633 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.297640] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416466, 'name': CreateVM_Task} progress is 15%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.311859] env[62522]: DEBUG oslo_vmware.api [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1278.311859] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52cd2e26-e03b-0b4c-842c-7252ed0cb99a" [ 1278.311859] env[62522]: _type = "Task" [ 1278.311859] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.312160] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Updating instance '961df2ff-bd02-45af-afb8-14a99cfea1de' progress to 67 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1278.323851] env[62522]: DEBUG oslo_vmware.api [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52cd2e26-e03b-0b4c-842c-7252ed0cb99a, 'name': SearchDatastore_Task, 'duration_secs': 0.009564} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.324163] env[62522]: DEBUG oslo_concurrency.lockutils [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1278.324365] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] da11bae6-484b-455e-9462-6f5143d2a9a9/da11bae6-484b-455e-9462-6f5143d2a9a9.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1278.324615] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a6f25f8-4d98-4129-9a06-3c8c951ec95d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.330436] env[62522]: DEBUG oslo_vmware.api [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1278.330436] env[62522]: value = "task-2416467" [ 1278.330436] env[62522]: _type = "Task" [ 1278.330436] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.337733] env[62522]: DEBUG oslo_vmware.api [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416467, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.436603] env[62522]: DEBUG oslo_concurrency.lockutils [req-28ac9d97-fde2-47fc-a498-ebbc97745fc8 req-0a172b2f-8da1-4c71-991a-a00d24b21309 service nova] Releasing lock "refresh_cache-da11bae6-484b-455e-9462-6f5143d2a9a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1278.793842] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416466, 'name': CreateVM_Task, 'duration_secs': 0.306372} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.794102] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1278.794746] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1278.794906] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1278.795246] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1278.795498] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d925d466-0952-4a70-a8b7-dd20224167cf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.799712] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1278.799712] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5296e65a-cb2a-5b6f-7ef4-2ebe578efd6c" [ 1278.799712] env[62522]: _type = "Task" [ 1278.799712] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.807161] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5296e65a-cb2a-5b6f-7ef4-2ebe578efd6c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.838694] env[62522]: DEBUG oslo_vmware.api [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416467, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.43015} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.838947] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] da11bae6-484b-455e-9462-6f5143d2a9a9/da11bae6-484b-455e-9462-6f5143d2a9a9.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1278.839211] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1278.839450] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-16d9f990-c3b1-4ca5-bc70-e661639f3919 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.845760] env[62522]: DEBUG oslo_vmware.api [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1278.845760] env[62522]: value = "task-2416468" [ 1278.845760] env[62522]: _type = "Task" [ 1278.845760] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.852978] env[62522]: DEBUG oslo_vmware.api [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416468, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.854231] env[62522]: DEBUG nova.network.neutron [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Port a15f47bb-6d26-4faf-91e1-6ce27453f7bf binding to destination host cpu-1 is already ACTIVE {{(pid=62522) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1279.311903] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5296e65a-cb2a-5b6f-7ef4-2ebe578efd6c, 'name': SearchDatastore_Task, 'duration_secs': 0.009139} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.312164] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1279.312414] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1279.312648] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1279.312795] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1279.312973] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1279.313253] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6abaceeb-d0ae-46fb-9763-7138cc3935b5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.331528] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1279.331703] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1279.332442] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-455594d9-5684-48cb-bfb4-46b1a51191fe {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.337515] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1279.337515] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5230b23b-b322-31f1-92a1-82b457f66261" [ 1279.337515] env[62522]: _type = "Task" [ 1279.337515] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.344359] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5230b23b-b322-31f1-92a1-82b457f66261, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.353166] env[62522]: DEBUG oslo_vmware.api [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416468, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.057787} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.353405] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1279.354128] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef592af-915d-415f-a044-ed0669e41ef3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.378261] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] da11bae6-484b-455e-9462-6f5143d2a9a9/da11bae6-484b-455e-9462-6f5143d2a9a9.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1279.378528] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8bcac27-4468-4dd9-97e1-2b3b6fa15d7a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.397607] env[62522]: DEBUG oslo_vmware.api [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1279.397607] env[62522]: value = "task-2416469" [ 1279.397607] env[62522]: _type = "Task" [ 1279.397607] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.406254] env[62522]: DEBUG oslo_vmware.api [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416469, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.847650] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5230b23b-b322-31f1-92a1-82b457f66261, 'name': SearchDatastore_Task, 'duration_secs': 0.044732} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.848434] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2686232d-8709-4a4f-b84f-2fd00f4678a5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.853321] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1279.853321] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52825422-4383-423a-3301-c8aeb12bbd3b" [ 1279.853321] env[62522]: _type = "Task" [ 1279.853321] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.867677] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52825422-4383-423a-3301-c8aeb12bbd3b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.873855] env[62522]: DEBUG oslo_concurrency.lockutils [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "961df2ff-bd02-45af-afb8-14a99cfea1de-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1279.874092] env[62522]: DEBUG oslo_concurrency.lockutils [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "961df2ff-bd02-45af-afb8-14a99cfea1de-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1279.874318] env[62522]: DEBUG oslo_concurrency.lockutils [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "961df2ff-bd02-45af-afb8-14a99cfea1de-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1279.906760] env[62522]: DEBUG oslo_vmware.api [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416469, 'name': ReconfigVM_Task, 'duration_secs': 0.305138} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.907050] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Reconfigured VM instance instance-00000071 to attach disk [datastore1] da11bae6-484b-455e-9462-6f5143d2a9a9/da11bae6-484b-455e-9462-6f5143d2a9a9.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1279.907694] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a13e4f96-938f-4c31-b902-d55b7d4e6e66 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.914256] env[62522]: DEBUG oslo_vmware.api [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1279.914256] env[62522]: value = "task-2416470" [ 1279.914256] env[62522]: _type = "Task" [ 1279.914256] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.921692] env[62522]: DEBUG oslo_vmware.api [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416470, 'name': Rename_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.363969] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52825422-4383-423a-3301-c8aeb12bbd3b, 'name': SearchDatastore_Task, 'duration_secs': 0.039639} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.364822] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1280.364822] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 7406a1a4-a342-475b-ad02-6a29f7c487ee/7406a1a4-a342-475b-ad02-6a29f7c487ee.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1280.365067] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-83199c15-5e54-4734-809a-fe7105fcd8a1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.371624] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1280.371624] env[62522]: value = "task-2416471" [ 1280.371624] env[62522]: _type = "Task" [ 1280.371624] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.381778] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416471, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.423515] env[62522]: DEBUG oslo_vmware.api [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416470, 'name': Rename_Task, 'duration_secs': 0.143353} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.423777] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1280.424018] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-77b472a9-3813-487d-b52e-d3924c23bb3d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.429518] env[62522]: DEBUG oslo_vmware.api [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1280.429518] env[62522]: value = "task-2416472" [ 1280.429518] env[62522]: _type = "Task" [ 1280.429518] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.436927] env[62522]: DEBUG oslo_vmware.api [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416472, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.884890] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416471, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471874} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.885406] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 7406a1a4-a342-475b-ad02-6a29f7c487ee/7406a1a4-a342-475b-ad02-6a29f7c487ee.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1280.885527] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1280.887066] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f0b1ec0a-590d-4c84-97fc-08eebda976eb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.893947] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1280.893947] env[62522]: value = "task-2416473" [ 1280.893947] env[62522]: _type = "Task" [ 1280.893947] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.904440] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416473, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.923202] env[62522]: DEBUG oslo_concurrency.lockutils [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "refresh_cache-961df2ff-bd02-45af-afb8-14a99cfea1de" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1280.923518] env[62522]: DEBUG oslo_concurrency.lockutils [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired lock "refresh_cache-961df2ff-bd02-45af-afb8-14a99cfea1de" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.923827] env[62522]: DEBUG nova.network.neutron [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1280.944494] env[62522]: DEBUG oslo_vmware.api [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416472, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.404626] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416473, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.324393} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.404944] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1281.405755] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d0b4bb-774c-44e3-b346-acec42844250 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.427199] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] 7406a1a4-a342-475b-ad02-6a29f7c487ee/7406a1a4-a342-475b-ad02-6a29f7c487ee.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1281.429480] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2fcb274e-9802-4cb8-b545-a0fd0ddf8735 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.451969] env[62522]: DEBUG oslo_vmware.api [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416472, 'name': PowerOnVM_Task, 'duration_secs': 0.724594} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.453225] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1281.453440] env[62522]: INFO nova.compute.manager [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Took 6.98 seconds to spawn the instance on the hypervisor. [ 1281.453639] env[62522]: DEBUG nova.compute.manager [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1281.453987] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1281.453987] env[62522]: value = "task-2416474" [ 1281.453987] env[62522]: _type = "Task" [ 1281.453987] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.454718] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b72346f2-f83f-425e-b0b0-c98b5a46a87b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.469452] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416474, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.666181] env[62522]: DEBUG nova.network.neutron [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Updating instance_info_cache with network_info: [{"id": "a15f47bb-6d26-4faf-91e1-6ce27453f7bf", "address": "fa:16:3e:46:c2:20", "network": {"id": "21d0ca22-5509-4f9b-b167-f9fde2dac808", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-547933771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a06421250694a98b13ff34ad816dc75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa15f47bb-6d", "ovs_interfaceid": "a15f47bb-6d26-4faf-91e1-6ce27453f7bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1281.968439] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416474, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.976498] env[62522]: INFO nova.compute.manager [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Took 12.66 seconds to build instance. 
[ 1282.169268] env[62522]: DEBUG oslo_concurrency.lockutils [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Releasing lock "refresh_cache-961df2ff-bd02-45af-afb8-14a99cfea1de" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1282.467316] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416474, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.478287] env[62522]: DEBUG oslo_concurrency.lockutils [None req-232fc076-ddb5-47d5-9c34-238700fd7b30 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "da11bae6-484b-455e-9462-6f5143d2a9a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.174s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.590401] env[62522]: DEBUG nova.compute.manager [req-030d8393-b900-414d-85cd-5453e3f5b15e req-882a78ef-90c6-4dfa-9a9c-434c1198d7ba service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Received event network-changed-d2a62d4f-3bdc-4367-8694-9ba47bdfd799 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1282.590607] env[62522]: DEBUG nova.compute.manager [req-030d8393-b900-414d-85cd-5453e3f5b15e req-882a78ef-90c6-4dfa-9a9c-434c1198d7ba service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Refreshing instance network info cache due to event network-changed-d2a62d4f-3bdc-4367-8694-9ba47bdfd799. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1282.590816] env[62522]: DEBUG oslo_concurrency.lockutils [req-030d8393-b900-414d-85cd-5453e3f5b15e req-882a78ef-90c6-4dfa-9a9c-434c1198d7ba service nova] Acquiring lock "refresh_cache-da11bae6-484b-455e-9462-6f5143d2a9a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1282.590962] env[62522]: DEBUG oslo_concurrency.lockutils [req-030d8393-b900-414d-85cd-5453e3f5b15e req-882a78ef-90c6-4dfa-9a9c-434c1198d7ba service nova] Acquired lock "refresh_cache-da11bae6-484b-455e-9462-6f5143d2a9a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1282.591328] env[62522]: DEBUG nova.network.neutron [req-030d8393-b900-414d-85cd-5453e3f5b15e req-882a78ef-90c6-4dfa-9a9c-434c1198d7ba service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Refreshing network info cache for port d2a62d4f-3bdc-4367-8694-9ba47bdfd799 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1282.695359] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf7eeef-f694-48bf-8810-61e44ae2ebe9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.714213] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23783579-24b7-4987-a3b9-2c787de69340 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.720602] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Updating instance '961df2ff-bd02-45af-afb8-14a99cfea1de' progress to 83 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1282.967179] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416474, 'name': ReconfigVM_Task, 'duration_secs': 1.202401} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.967475] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Reconfigured VM instance instance-0000006d to attach disk [datastore1] 7406a1a4-a342-475b-ad02-6a29f7c487ee/7406a1a4-a342-475b-ad02-6a29f7c487ee.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1282.968655] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encrypted': False, 'size': 0, 'device_name': '/dev/sda', 'guest_format': None, 'device_type': 'disk', 'encryption_format': None, 'encryption_secret_uuid': None, 'boot_index': 0, 'disk_bus': None, 'encryption_options': None, 'image_id': '2ee4561b-ba48-4f45-82f6-eac89be98290'}], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sdb', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489869', 'volume_id': 'da41f036-456a-409e-a359-6157800d323c', 'name': 'volume-da41f036-456a-409e-a359-6157800d323c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7406a1a4-a342-475b-ad02-6a29f7c487ee', 'attached_at': '', 'detached_at': '', 'volume_id': 'da41f036-456a-409e-a359-6157800d323c', 'serial': 'da41f036-456a-409e-a359-6157800d323c'}, 'attachment_id': '653ba21b-af8f-47b7-b267-5ecc6c524b1d', 'delete_on_termination': False, 'guest_format': None, 'device_type': None, 'boot_index': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=62522) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1282.968964] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Volume attach. 
Driver type: vmdk {{(pid=62522) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1282.969055] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489869', 'volume_id': 'da41f036-456a-409e-a359-6157800d323c', 'name': 'volume-da41f036-456a-409e-a359-6157800d323c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7406a1a4-a342-475b-ad02-6a29f7c487ee', 'attached_at': '', 'detached_at': '', 'volume_id': 'da41f036-456a-409e-a359-6157800d323c', 'serial': 'da41f036-456a-409e-a359-6157800d323c'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1282.969815] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-319d2aaa-0deb-4a3f-b2c4-b7ec74033b83 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.984665] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-935f7451-da73-49ca-9365-8dda1efab3aa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.007745] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] volume-da41f036-456a-409e-a359-6157800d323c/volume-da41f036-456a-409e-a359-6157800d323c.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1283.008012] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc6fe82d-1ed6-4125-b083-ebe1ee93148a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.026489] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1283.026489] env[62522]: value = "task-2416475" [ 1283.026489] env[62522]: _type = "Task" [ 1283.026489] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.036190] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416475, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.227028] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1283.227375] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9fab166f-587d-4ffe-86b6-a08812d6df16 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.235776] env[62522]: DEBUG oslo_vmware.api [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1283.235776] env[62522]: value = "task-2416476" [ 1283.235776] env[62522]: _type = "Task" [ 1283.235776] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.246918] env[62522]: DEBUG oslo_vmware.api [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416476, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.307911] env[62522]: DEBUG nova.network.neutron [req-030d8393-b900-414d-85cd-5453e3f5b15e req-882a78ef-90c6-4dfa-9a9c-434c1198d7ba service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Updated VIF entry in instance network info cache for port d2a62d4f-3bdc-4367-8694-9ba47bdfd799. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1283.308452] env[62522]: DEBUG nova.network.neutron [req-030d8393-b900-414d-85cd-5453e3f5b15e req-882a78ef-90c6-4dfa-9a9c-434c1198d7ba service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Updating instance_info_cache with network_info: [{"id": "d2a62d4f-3bdc-4367-8694-9ba47bdfd799", "address": "fa:16:3e:73:de:e5", "network": {"id": "2a4f6f01-ad28-4f8f-b835-ea86e1791f49", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1577320995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82346c440c3343a0a5c233a48203a13c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2a62d4f-3b", "ovs_interfaceid": "d2a62d4f-3bdc-4367-8694-9ba47bdfd799", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1283.537721] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416475, 'name': ReconfigVM_Task, 'duration_secs': 0.305721} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.537961] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Reconfigured VM instance instance-0000006d to attach disk [datastore1] volume-da41f036-456a-409e-a359-6157800d323c/volume-da41f036-456a-409e-a359-6157800d323c.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1283.542604] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e5bf33a-1c24-425e-8c55-2e45bd4a4d0b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.559389] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1283.559389] env[62522]: value = "task-2416477" [ 1283.559389] env[62522]: _type = "Task" [ 1283.559389] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.568108] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416477, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.746879] env[62522]: DEBUG oslo_vmware.api [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416476, 'name': PowerOnVM_Task, 'duration_secs': 0.510408} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.747179] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1283.747369] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-945da448-c78e-41e5-b117-0efeba5d387e tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Updating instance '961df2ff-bd02-45af-afb8-14a99cfea1de' progress to 100 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1283.811644] env[62522]: DEBUG oslo_concurrency.lockutils [req-030d8393-b900-414d-85cd-5453e3f5b15e req-882a78ef-90c6-4dfa-9a9c-434c1198d7ba service nova] Releasing lock "refresh_cache-da11bae6-484b-455e-9462-6f5143d2a9a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1284.070019] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416477, 'name': ReconfigVM_Task, 'duration_secs': 0.151204} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.070359] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489869', 'volume_id': 'da41f036-456a-409e-a359-6157800d323c', 'name': 'volume-da41f036-456a-409e-a359-6157800d323c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7406a1a4-a342-475b-ad02-6a29f7c487ee', 'attached_at': '', 'detached_at': '', 'volume_id': 'da41f036-456a-409e-a359-6157800d323c', 'serial': 'da41f036-456a-409e-a359-6157800d323c'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1284.070892] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-abc40a0e-d27e-43cc-9d48-e10c61084d8c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.078889] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1284.078889] env[62522]: value = "task-2416478" [ 1284.078889] env[62522]: _type = "Task" [ 1284.078889] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.087129] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416478, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.589438] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416478, 'name': Rename_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.089809] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416478, 'name': Rename_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.590450] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416478, 'name': Rename_Task, 'duration_secs': 1.156691} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.590732] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1285.590992] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ff8c427f-161a-476b-9c6c-4171400aa106 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.596669] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1285.596669] env[62522]: value = "task-2416479" [ 1285.596669] env[62522]: _type = "Task" [ 1285.596669] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.607738] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416479, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.106849] env[62522]: DEBUG oslo_vmware.api [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416479, 'name': PowerOnVM_Task, 'duration_secs': 0.486372} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.107228] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1286.107353] env[62522]: DEBUG nova.compute.manager [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1286.108101] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ddae3df-7461-4628-b8a7-e80a1930ae2d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.206090] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "961df2ff-bd02-45af-afb8-14a99cfea1de" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1286.206372] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "961df2ff-bd02-45af-afb8-14a99cfea1de" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1286.206670] env[62522]: DEBUG nova.compute.manager [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Going to confirm migration 7 {{(pid=62522) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1286.624990] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1286.625269] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1286.625464] env[62522]: DEBUG nova.objects.instance [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62522) apply_migration_context 
/opt/stack/nova/nova/objects/instance.py:1067}} [ 1286.743666] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "refresh_cache-961df2ff-bd02-45af-afb8-14a99cfea1de" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1286.743856] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquired lock "refresh_cache-961df2ff-bd02-45af-afb8-14a99cfea1de" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1286.744043] env[62522]: DEBUG nova.network.neutron [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1286.744309] env[62522]: DEBUG nova.objects.instance [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lazy-loading 'info_cache' on Instance uuid 961df2ff-bd02-45af-afb8-14a99cfea1de {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1287.247333] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1287.634112] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3784317f-5572-4ba2-befa-aa8434c9de26 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1287.954632] env[62522]: DEBUG nova.network.neutron [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Updating instance_info_cache with network_info: [{"id": "a15f47bb-6d26-4faf-91e1-6ce27453f7bf", "address": "fa:16:3e:46:c2:20", "network": {"id": "21d0ca22-5509-4f9b-b167-f9fde2dac808", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-547933771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a06421250694a98b13ff34ad816dc75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa15f47bb-6d", "ovs_interfaceid": "a15f47bb-6d26-4faf-91e1-6ce27453f7bf", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1288.457837] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Releasing lock "refresh_cache-961df2ff-bd02-45af-afb8-14a99cfea1de" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1288.458240] env[62522]: DEBUG nova.objects.instance [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lazy-loading 'migration_context' on Instance uuid 961df2ff-bd02-45af-afb8-14a99cfea1de {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1288.961817] env[62522]: DEBUG nova.objects.base [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Object Instance<961df2ff-bd02-45af-afb8-14a99cfea1de> lazy-loaded attributes: info_cache,migration_context {{(pid=62522) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1288.962809] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e49b654-70e9-4ad7-a433-04bd7e8cfbc2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.984507] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e3c5608-1221-4926-a248-41bad1754092 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.990362] env[62522]: DEBUG oslo_vmware.api [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1288.990362] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f186b0-857e-80d3-7bb3-df3fb78a7c70" [ 1288.990362] env[62522]: _type = "Task" [ 1288.990362] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.999143] env[62522]: DEBUG oslo_vmware.api [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f186b0-857e-80d3-7bb3-df3fb78a7c70, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.501024] env[62522]: DEBUG oslo_vmware.api [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f186b0-857e-80d3-7bb3-df3fb78a7c70, 'name': SearchDatastore_Task, 'duration_secs': 0.009561} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.501394] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1289.501501] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1290.109555] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8519ab19-b23a-4a2e-a47f-4fd7451e7775 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.117206] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa8a68a-bf7b-4135-adf1-80f1d44d832a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.146850] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd7c44c3-d72c-4bbd-adc2-8416a668b289 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.153977] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-708154b1-fcc2-49fb-8092-e075a51aa809 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.166813] env[62522]: DEBUG nova.compute.provider_tree [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1290.244537] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1290.670575] env[62522]: DEBUG nova.scheduler.client.report [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1291.246248] env[62522]: DEBUG oslo_service.periodic_task [None 
req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1291.246492] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1291.681789] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.180s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.749733] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.749962] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.750156] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.750317] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62522) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1291.751210] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-137469fa-23b7-4fc2-86bf-ee1ed71f84dd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.759710] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc7308a3-fa0f-4e7a-bf95-2df9673e5934 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.773218] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e5003f4-8530-41b9-89d8-85a7a8f6ae5f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.779280] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a75573-4717-487c-b6b6-a4c8e0759764 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.807303] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180396MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=62522) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1291.807447] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.807625] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1292.236740] env[62522]: INFO nova.scheduler.client.report [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Deleted allocation for migration 3826051c-749f-40bd-a8e3-76ba379c3bca [ 1292.742047] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "961df2ff-bd02-45af-afb8-14a99cfea1de" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.535s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1292.831232] env[62522]: DEBUG oslo_concurrency.lockutils [None req-98c57058-4ccb-4c3f-b0b2-be72ca61fb13 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "27f4b976-7dff-49b0-9b00-7515cb976e72" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1292.831494] env[62522]: DEBUG oslo_concurrency.lockutils [None req-98c57058-4ccb-4c3f-b0b2-be72ca61fb13 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "27f4b976-7dff-49b0-9b00-7515cb976e72" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1292.831732] env[62522]: INFO nova.compute.manager [None req-98c57058-4ccb-4c3f-b0b2-be72ca61fb13 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Rebooting instance [ 1292.836599] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance b31195c2-29f4-475c-baa7-fcb4791b7278 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1292.836745] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance a4cb5c19-9087-4354-9689-a99ae8924dc1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1292.836854] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 27f4b976-7dff-49b0-9b00-7515cb976e72 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1292.836973] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 7406a1a4-a342-475b-ad02-6a29f7c487ee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1292.837110] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance f74196c1-b00f-4f42-84dc-17b21fa30374 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1292.837225] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance cb7a19f1-6093-47ee-bbbc-a75dd5423f32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1292.837339] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 961df2ff-bd02-45af-afb8-14a99cfea1de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1292.837452] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance da11bae6-484b-455e-9462-6f5143d2a9a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1292.837641] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1292.837773] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2112MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1292.933084] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f200b892-07ad-417f-87c1-7ff285f39241 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.943023] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab90c18-3597-4aeb-b06d-c10a498c5536 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.974912] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc456f4b-0e31-4493-851a-69d8efc0fe5b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.982669] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f87e80c1-0c61-495f-8d51-1b2860a81ad7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.995861] env[62522]: DEBUG nova.compute.provider_tree [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1293.348513] env[62522]: DEBUG oslo_concurrency.lockutils [None req-98c57058-4ccb-4c3f-b0b2-be72ca61fb13 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1293.348701] env[62522]: DEBUG oslo_concurrency.lockutils [None req-98c57058-4ccb-4c3f-b0b2-be72ca61fb13 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1293.348877] env[62522]: DEBUG nova.network.neutron [None req-98c57058-4ccb-4c3f-b0b2-be72ca61fb13 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1293.498831] env[62522]: DEBUG nova.scheduler.client.report [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 
'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1294.003009] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62522) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1294.003297] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.196s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1294.057897] env[62522]: DEBUG nova.network.neutron [None req-98c57058-4ccb-4c3f-b0b2-be72ca61fb13 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Updating instance_info_cache with network_info: [{"id": "cf4b3978-2fa2-4182-9422-abf29faafcf6", "address": "fa:16:3e:74:26:e7", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf4b3978-2f", "ovs_interfaceid": "cf4b3978-2fa2-4182-9422-abf29faafcf6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1294.231868] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "961df2ff-bd02-45af-afb8-14a99cfea1de" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1294.232193] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "961df2ff-bd02-45af-afb8-14a99cfea1de" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1294.232424] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "961df2ff-bd02-45af-afb8-14a99cfea1de-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1294.232617] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "961df2ff-bd02-45af-afb8-14a99cfea1de-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1294.232788] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "961df2ff-bd02-45af-afb8-14a99cfea1de-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1294.234821] env[62522]: INFO nova.compute.manager [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Terminating instance [ 1294.560751] env[62522]: DEBUG oslo_concurrency.lockutils [None req-98c57058-4ccb-4c3f-b0b2-be72ca61fb13 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Releasing lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1294.737948] env[62522]: DEBUG nova.compute.manager [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1294.738217] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1294.739130] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06753ae-5afc-40f5-8663-6ad1afc12870 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.747103] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1294.747395] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dfee5660-d5cb-4a34-a757-cab3e17ade6b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.753794] env[62522]: DEBUG oslo_vmware.api [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1294.753794] env[62522]: value = "task-2416480" [ 1294.753794] env[62522]: _type = "Task" [ 1294.753794] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.760941] env[62522]: DEBUG oslo_vmware.api [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416480, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.999816] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1295.065042] env[62522]: DEBUG nova.compute.manager [None req-98c57058-4ccb-4c3f-b0b2-be72ca61fb13 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1295.065804] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdca9a22-6585-430b-973f-b61bea7e7325 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.263096] env[62522]: DEBUG oslo_vmware.api [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416480, 'name': PowerOffVM_Task, 'duration_secs': 0.267796} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.263369] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1295.263584] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1295.263871] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-42feeeb2-baa2-4523-a425-8f5a43f78a34 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.324014] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1295.324246] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1295.324455] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Deleting the datastore file [datastore2] 961df2ff-bd02-45af-afb8-14a99cfea1de {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1295.324717] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1358b3eb-8d8f-4b82-8a68-b31851e67c0a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.331512] env[62522]: DEBUG oslo_vmware.api [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for the task: (returnval){ [ 1295.331512] env[62522]: value = "task-2416482" [ 1295.331512] env[62522]: _type = "Task" [ 1295.331512] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.338637] env[62522]: DEBUG oslo_vmware.api [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416482, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.506277] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1295.506454] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Starting heal instance info cache {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1295.841628] env[62522]: DEBUG oslo_vmware.api [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Task: {'id': task-2416482, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137378} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.841825] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1295.842029] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1295.842222] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1295.842398] env[62522]: INFO nova.compute.manager [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1295.842642] env[62522]: DEBUG oslo.service.loopingcall [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1295.842835] env[62522]: DEBUG nova.compute.manager [-] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1295.842935] env[62522]: DEBUG nova.network.neutron [-] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1296.009266] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Didn't find any instances for network info cache update. 
{{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 1296.009536] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1296.009712] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1296.009866] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1296.010029] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1296.010172] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62522) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1296.081335] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e33290c2-d33f-4bfc-ac52-4a4581f071c3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.092923] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-98c57058-4ccb-4c3f-b0b2-be72ca61fb13 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Doing hard reboot of VM {{(pid=62522) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1296.093257] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-b5be7b9a-67b3-4182-b5bd-149d0bca195d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.101955] env[62522]: DEBUG oslo_vmware.api [None req-98c57058-4ccb-4c3f-b0b2-be72ca61fb13 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1296.101955] env[62522]: value = "task-2416483" [ 1296.101955] env[62522]: _type = "Task" [ 1296.101955] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.111551] env[62522]: DEBUG oslo_vmware.api [None req-98c57058-4ccb-4c3f-b0b2-be72ca61fb13 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416483, 'name': ResetVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.113770] env[62522]: DEBUG nova.compute.manager [req-abdc5e58-285c-443f-8e60-a66f253c3d6c req-84fff017-0e46-494e-8192-ec0a601e2def service nova] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Received event network-vif-deleted-a15f47bb-6d26-4faf-91e1-6ce27453f7bf {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1296.114027] env[62522]: INFO nova.compute.manager [req-abdc5e58-285c-443f-8e60-a66f253c3d6c req-84fff017-0e46-494e-8192-ec0a601e2def service nova] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Neutron deleted interface a15f47bb-6d26-4faf-91e1-6ce27453f7bf; detaching it from the instance and deleting it from the info cache [ 1296.114239] env[62522]: DEBUG nova.network.neutron [req-abdc5e58-285c-443f-8e60-a66f253c3d6c req-84fff017-0e46-494e-8192-ec0a601e2def service nova] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1296.587795] env[62522]: DEBUG nova.network.neutron [-] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1296.611430] env[62522]: DEBUG oslo_vmware.api [None req-98c57058-4ccb-4c3f-b0b2-be72ca61fb13 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416483, 'name': ResetVM_Task, 'duration_secs': 0.081567} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.611758] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-98c57058-4ccb-4c3f-b0b2-be72ca61fb13 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Did hard reboot of VM {{(pid=62522) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1296.611997] env[62522]: DEBUG nova.compute.manager [None req-98c57058-4ccb-4c3f-b0b2-be72ca61fb13 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1296.612758] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1926043e-fb1a-44f8-8b21-7a307457e4b8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.616901] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-479a440b-3f9e-44f5-9d3a-8ec818cb0c54 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.628628] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d71db7-dcba-461f-a770-18e9910a25cf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.656797] env[62522]: DEBUG nova.compute.manager [req-abdc5e58-285c-443f-8e60-a66f253c3d6c req-84fff017-0e46-494e-8192-ec0a601e2def service nova] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Detach interface failed, port_id=a15f47bb-6d26-4faf-91e1-6ce27453f7bf, reason: 
Instance 961df2ff-bd02-45af-afb8-14a99cfea1de could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1297.090356] env[62522]: INFO nova.compute.manager [-] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Took 1.25 seconds to deallocate network for instance. [ 1297.125556] env[62522]: DEBUG oslo_concurrency.lockutils [None req-98c57058-4ccb-4c3f-b0b2-be72ca61fb13 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "27f4b976-7dff-49b0-9b00-7515cb976e72" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.294s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1297.598018] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1297.598304] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1297.598529] env[62522]: DEBUG nova.objects.instance [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lazy-loading 'resources' on Instance uuid 961df2ff-bd02-45af-afb8-14a99cfea1de {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1298.209296] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e4d0483-2883-4e3f-a57b-74d48b4b5d19 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.218377] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b707ad31-6119-4089-ac6b-941e0fc22c11 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.248107] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07677988-b22b-474a-acc0-ffca0405ec90 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.255089] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6766f1df-1595-42a2-a166-cbefecd6b1e6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.267731] env[62522]: DEBUG nova.compute.provider_tree [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1298.770869] env[62522]: DEBUG nova.scheduler.client.report [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 
tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1299.277064] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.678s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1299.297622] env[62522]: INFO nova.scheduler.client.report [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Deleted allocations for instance 961df2ff-bd02-45af-afb8-14a99cfea1de [ 1299.806016] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6d40f7fc-8a4d-4843-8a13-57d336ae6b40 tempest-DeleteServersTestJSON-552527927 tempest-DeleteServersTestJSON-552527927-project-member] Lock "961df2ff-bd02-45af-afb8-14a99cfea1de" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.574s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1300.672226] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "f74196c1-b00f-4f42-84dc-17b21fa30374" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1300.672618] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "f74196c1-b00f-4f42-84dc-17b21fa30374" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1300.672695] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "f74196c1-b00f-4f42-84dc-17b21fa30374-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1300.672876] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "f74196c1-b00f-4f42-84dc-17b21fa30374-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 
0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1300.673065] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "f74196c1-b00f-4f42-84dc-17b21fa30374-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1300.675396] env[62522]: INFO nova.compute.manager [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Terminating instance [ 1301.179601] env[62522]: DEBUG nova.compute.manager [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1301.179810] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1301.180751] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15ea22e9-5a5c-40a7-9303-279dbd97c986 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.188434] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1301.188670] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a472fd6f-6546-489f-a2ac-f8f60e5f330c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.195221] env[62522]: DEBUG oslo_vmware.api [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1301.195221] env[62522]: value = "task-2416485" [ 1301.195221] env[62522]: _type = "Task" [ 1301.195221] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.202566] env[62522]: DEBUG oslo_vmware.api [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416485, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.705079] env[62522]: DEBUG oslo_vmware.api [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416485, 'name': PowerOffVM_Task, 'duration_secs': 0.155072} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.705450] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1301.705571] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1301.705774] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-87206932-31bb-4c81-9ccd-629cdc75f54e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.772199] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1301.772470] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1301.772632] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Deleting the datastore file [datastore2] f74196c1-b00f-4f42-84dc-17b21fa30374 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1301.772901] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dd1e10f3-4e9f-4e34-8564-b012e62f63d6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.783299] env[62522]: DEBUG oslo_vmware.api [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1301.783299] env[62522]: value = "task-2416487" [ 1301.783299] env[62522]: _type = "Task" [ 1301.783299] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.792050] env[62522]: DEBUG oslo_vmware.api [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416487, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.293232] env[62522]: DEBUG oslo_vmware.api [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416487, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142898} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.293835] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1302.293835] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1302.293835] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1302.294180] env[62522]: INFO nova.compute.manager [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1302.294180] env[62522]: DEBUG oslo.service.loopingcall [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1302.294368] env[62522]: DEBUG nova.compute.manager [-] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1302.294486] env[62522]: DEBUG nova.network.neutron [-] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1302.805793] env[62522]: DEBUG nova.compute.manager [req-486ce44d-4a72-4a52-a547-11b6279af885 req-a7e06e82-8e0f-49fd-a4b5-3f82b3bfa216 service nova] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Received event network-vif-deleted-14494be3-972e-4dae-a55d-bd5b458491d9 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1302.806070] env[62522]: INFO nova.compute.manager [req-486ce44d-4a72-4a52-a547-11b6279af885 req-a7e06e82-8e0f-49fd-a4b5-3f82b3bfa216 service nova] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Neutron deleted interface 14494be3-972e-4dae-a55d-bd5b458491d9; detaching it from the instance and deleting it from the info cache [ 1302.806266] env[62522]: DEBUG nova.network.neutron [req-486ce44d-4a72-4a52-a547-11b6279af885 req-a7e06e82-8e0f-49fd-a4b5-3f82b3bfa216 service nova] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1303.285925] env[62522]: DEBUG nova.network.neutron [-] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1303.308613] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e9ce992f-5615-411b-8867-63b875346d89 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.319419] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11305400-59c2-4f67-a9a7-a7efc1333634 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.348560] env[62522]: DEBUG nova.compute.manager [req-486ce44d-4a72-4a52-a547-11b6279af885 req-a7e06e82-8e0f-49fd-a4b5-3f82b3bfa216 service nova] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Detach interface failed, port_id=14494be3-972e-4dae-a55d-bd5b458491d9, reason: Instance f74196c1-b00f-4f42-84dc-17b21fa30374 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1303.789491] env[62522]: INFO nova.compute.manager [-] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Took 1.49 seconds to deallocate network for instance. 
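The entries above trace Nova's VMware-driver terminate path: power off the VM (PowerOffVM_Task), unregister it, delete its directory on the datastore (DeleteDatastoreFile_Task), then deallocate the Neutron network and release the instance lock. The following is a minimal, hypothetical Python sketch of that sequence, not Nova's actual code; every name here (FakeVimSession, destroy_instance, the stub task tokens) is an illustrative stand-in that only mirrors the order of operations and the task-polling pattern visible in the wait_for_task/_poll_task lines.

# Illustrative sketch only -- NOT the real nova.virt.vmwareapi or oslo.vmware code.
# All classes and helpers below are hypothetical stand-ins.
import time


class FakeVimSession:
    """Hypothetical stand-in for a vCenter API session."""

    def call(self, method, **kwargs):
        # A real session would issue the SOAP request (e.g. PowerOffVM_Task)
        # and return a task reference; here we just return a token.
        print(f"invoking {method} {kwargs}")
        return f"task-for-{method}"

    def wait_for_task(self, task, poll_interval=0.5, polls=3):
        # Corresponds to the repeated "_poll_task ... progress is N%" lines:
        # poll the task until it reports success (or raises on error).
        for _ in range(polls):
            time.sleep(poll_interval)
        print(f"{task} completed successfully")


def destroy_instance(session, vm_ref, datastore_path):
    """Mirror the destroy flow seen in the log: power off, unregister,
    then delete the instance directory from the datastore."""
    task = session.call("PowerOffVM_Task", vm=vm_ref)
    session.wait_for_task(task)

    # UnregisterVM is not a task in vSphere; it returns immediately.
    session.call("UnregisterVM", vm=vm_ref)

    task = session.call("DeleteDatastoreFile_Task", name=datastore_path)
    session.wait_for_task(task)
    # After this point the compute manager deallocates the network and
    # updates the resource tracker, as the subsequent log entries show.


if __name__ == "__main__":
    destroy_instance(FakeVimSession(), "vm-example",
                     "[datastore2] 961df2ff-bd02-45af-afb8-14a99cfea1de")
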
[ 1304.296322] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1304.296612] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1304.296800] env[62522]: DEBUG nova.objects.instance [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lazy-loading 'resources' on Instance uuid f74196c1-b00f-4f42-84dc-17b21fa30374 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1304.907720] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ddd7458-8c34-481b-8660-4b1e99eb3038 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.915566] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43149a4d-e5e4-40f4-bd00-1032b6614c9d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.944946] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-615aafa3-f5af-4b00-ae58-7c8ad7fee6cc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.952424] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-832851d1-ac6e-460c-b179-94dd40ac0ba5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.966254] env[62522]: DEBUG nova.compute.provider_tree [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1305.470483] env[62522]: DEBUG nova.scheduler.client.report [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1305.976654] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 
tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.680s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1306.001845] env[62522]: INFO nova.scheduler.client.report [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Deleted allocations for instance f74196c1-b00f-4f42-84dc-17b21fa30374 [ 1306.510103] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6e9d45fd-be4a-4414-ac1c-c5cc5848eb62 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "f74196c1-b00f-4f42-84dc-17b21fa30374" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.837s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1307.763743] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f66886e6-b106-4be2-b20a-d24b5aaa3d03 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "a4cb5c19-9087-4354-9689-a99ae8924dc1" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1307.764132] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f66886e6-b106-4be2-b20a-d24b5aaa3d03 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "a4cb5c19-9087-4354-9689-a99ae8924dc1" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1308.267353] env[62522]: INFO nova.compute.manager [None req-f66886e6-b106-4be2-b20a-d24b5aaa3d03 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Detaching volume 792bdc79-fff9-48a5-b954-77d64857a962 [ 1308.296632] env[62522]: INFO nova.virt.block_device [None req-f66886e6-b106-4be2-b20a-d24b5aaa3d03 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Attempting to driver detach volume 792bdc79-fff9-48a5-b954-77d64857a962 from mountpoint /dev/sdb [ 1308.296998] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-f66886e6-b106-4be2-b20a-d24b5aaa3d03 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Volume detach. 
Driver type: vmdk {{(pid=62522) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1308.297088] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-f66886e6-b106-4be2-b20a-d24b5aaa3d03 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489864', 'volume_id': '792bdc79-fff9-48a5-b954-77d64857a962', 'name': 'volume-792bdc79-fff9-48a5-b954-77d64857a962', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a4cb5c19-9087-4354-9689-a99ae8924dc1', 'attached_at': '', 'detached_at': '', 'volume_id': '792bdc79-fff9-48a5-b954-77d64857a962', 'serial': '792bdc79-fff9-48a5-b954-77d64857a962'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1308.297940] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be1fd32-0d51-456e-b99a-9f9be3fe73a8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.318641] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb02cb9b-36e0-46e1-9090-be7abe0ec95a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.325316] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fecb0f76-fa92-44f9-afaf-9efb6a40060b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.344871] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1349ec-dbc8-4e09-8672-f14ec8fd96c5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.359741] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-f66886e6-b106-4be2-b20a-d24b5aaa3d03 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] The volume has not been displaced from its original location: [datastore1] volume-792bdc79-fff9-48a5-b954-77d64857a962/volume-792bdc79-fff9-48a5-b954-77d64857a962.vmdk. No consolidation needed. 
{{(pid=62522) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1308.364773] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-f66886e6-b106-4be2-b20a-d24b5aaa3d03 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Reconfiguring VM instance instance-0000006a to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1308.365039] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-199038f5-92ad-4fad-ad30-b3066913f813 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.382268] env[62522]: DEBUG oslo_vmware.api [None req-f66886e6-b106-4be2-b20a-d24b5aaa3d03 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1308.382268] env[62522]: value = "task-2416488" [ 1308.382268] env[62522]: _type = "Task" [ 1308.382268] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.389550] env[62522]: DEBUG oslo_vmware.api [None req-f66886e6-b106-4be2-b20a-d24b5aaa3d03 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416488, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.891592] env[62522]: DEBUG oslo_vmware.api [None req-f66886e6-b106-4be2-b20a-d24b5aaa3d03 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416488, 'name': ReconfigVM_Task, 'duration_secs': 0.25922} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.891957] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-f66886e6-b106-4be2-b20a-d24b5aaa3d03 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Reconfigured VM instance instance-0000006a to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1308.896447] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f73014f2-9134-49f6-87e4-c444ae354ef9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.910636] env[62522]: DEBUG oslo_vmware.api [None req-f66886e6-b106-4be2-b20a-d24b5aaa3d03 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1308.910636] env[62522]: value = "task-2416489" [ 1308.910636] env[62522]: _type = "Task" [ 1308.910636] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.918163] env[62522]: DEBUG oslo_vmware.api [None req-f66886e6-b106-4be2-b20a-d24b5aaa3d03 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416489, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.420387] env[62522]: DEBUG oslo_vmware.api [None req-f66886e6-b106-4be2-b20a-d24b5aaa3d03 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416489, 'name': ReconfigVM_Task, 'duration_secs': 0.129403} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.420667] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-f66886e6-b106-4be2-b20a-d24b5aaa3d03 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489864', 'volume_id': '792bdc79-fff9-48a5-b954-77d64857a962', 'name': 'volume-792bdc79-fff9-48a5-b954-77d64857a962', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a4cb5c19-9087-4354-9689-a99ae8924dc1', 'attached_at': '', 'detached_at': '', 'volume_id': '792bdc79-fff9-48a5-b954-77d64857a962', 'serial': '792bdc79-fff9-48a5-b954-77d64857a962'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1309.960331] env[62522]: DEBUG nova.objects.instance [None req-f66886e6-b106-4be2-b20a-d24b5aaa3d03 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lazy-loading 'flavor' on Instance uuid a4cb5c19-9087-4354-9689-a99ae8924dc1 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1310.967369] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f66886e6-b106-4be2-b20a-d24b5aaa3d03 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "a4cb5c19-9087-4354-9689-a99ae8924dc1" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.203s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.000106] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "a4cb5c19-9087-4354-9689-a99ae8924dc1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1312.000458] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "a4cb5c19-9087-4354-9689-a99ae8924dc1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1312.000563] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "a4cb5c19-9087-4354-9689-a99ae8924dc1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1312.000725] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "a4cb5c19-9087-4354-9689-a99ae8924dc1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1312.000902] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "a4cb5c19-9087-4354-9689-a99ae8924dc1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.003026] env[62522]: INFO nova.compute.manager [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Terminating instance [ 1312.506930] env[62522]: DEBUG nova.compute.manager [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1312.507190] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1312.508119] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6279357d-7de4-451a-9e15-db0a6417cf8e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.516081] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1312.516270] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1979d9d1-ae1d-4be9-9a05-7bb953c41d60 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.522316] env[62522]: DEBUG oslo_vmware.api [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1312.522316] env[62522]: value = "task-2416490" [ 1312.522316] env[62522]: _type = "Task" [ 1312.522316] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.530118] env[62522]: DEBUG oslo_vmware.api [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416490, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.032283] env[62522]: DEBUG oslo_vmware.api [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416490, 'name': PowerOffVM_Task, 'duration_secs': 0.163416} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.032675] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1313.032891] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1313.033027] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-41475b05-59af-4cc2-b797-9a8e29a83f75 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.096552] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1313.096791] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1313.096975] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Deleting the datastore file [datastore2] a4cb5c19-9087-4354-9689-a99ae8924dc1 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1313.097263] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-43db229a-021d-4288-bc03-5db203dece20 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.103830] env[62522]: DEBUG oslo_vmware.api [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1313.103830] env[62522]: value = "task-2416492" [ 
1313.103830] env[62522]: _type = "Task" [ 1313.103830] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.113132] env[62522]: DEBUG oslo_vmware.api [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416492, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.613565] env[62522]: DEBUG oslo_vmware.api [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416492, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132534} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.613826] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1313.613994] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1313.614193] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1313.614370] env[62522]: INFO nova.compute.manager [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1313.614648] env[62522]: DEBUG oslo.service.loopingcall [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1313.614851] env[62522]: DEBUG nova.compute.manager [-] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1313.614947] env[62522]: DEBUG nova.network.neutron [-] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1313.700718] env[62522]: DEBUG oslo_concurrency.lockutils [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1313.700978] env[62522]: DEBUG oslo_concurrency.lockutils [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1313.701215] env[62522]: DEBUG oslo_concurrency.lockutils [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1313.701403] env[62522]: DEBUG oslo_concurrency.lockutils [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1313.701575] env[62522]: DEBUG oslo_concurrency.lockutils [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1313.703721] env[62522]: INFO nova.compute.manager [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Terminating instance [ 1314.075525] env[62522]: DEBUG nova.compute.manager [req-6d21b4c3-2819-4f87-ae4d-7c42c3087d33 req-a542a081-8d59-41c4-b498-6d06e6887732 service nova] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Received event network-vif-deleted-fb503ded-334f-4a04-b774-61284edf466f {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1314.075779] env[62522]: INFO 
nova.compute.manager [req-6d21b4c3-2819-4f87-ae4d-7c42c3087d33 req-a542a081-8d59-41c4-b498-6d06e6887732 service nova] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Neutron deleted interface fb503ded-334f-4a04-b774-61284edf466f; detaching it from the instance and deleting it from the info cache [ 1314.075930] env[62522]: DEBUG nova.network.neutron [req-6d21b4c3-2819-4f87-ae4d-7c42c3087d33 req-a542a081-8d59-41c4-b498-6d06e6887732 service nova] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1314.207575] env[62522]: DEBUG nova.compute.manager [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1314.207766] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1314.208966] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf145415-1036-41f3-bf3a-c168b9df4b04 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.217785] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1314.218027] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4f825e0b-81bd-46a0-8f43-e15653eaeb16 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.224073] env[62522]: DEBUG oslo_vmware.api [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1314.224073] env[62522]: value = "task-2416493" [ 1314.224073] env[62522]: _type = "Task" [ 1314.224073] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.231644] env[62522]: DEBUG oslo_vmware.api [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416493, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.559627] env[62522]: DEBUG nova.network.neutron [-] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1314.578300] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c6a6dd46-483e-46e1-aabc-f4df91359bdc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.588057] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2215e8c0-039c-40e3-9217-212d941e6d05 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.615126] env[62522]: DEBUG nova.compute.manager [req-6d21b4c3-2819-4f87-ae4d-7c42c3087d33 req-a542a081-8d59-41c4-b498-6d06e6887732 service nova] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Detach interface failed, port_id=fb503ded-334f-4a04-b774-61284edf466f, reason: Instance a4cb5c19-9087-4354-9689-a99ae8924dc1 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1314.736108] env[62522]: DEBUG oslo_vmware.api [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416493, 'name': PowerOffVM_Task, 'duration_secs': 0.216048} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.736427] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1314.736611] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1314.736896] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b639469f-ca6c-4b37-9542-998e19658b69 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.062324] env[62522]: INFO nova.compute.manager [-] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Took 1.45 seconds to deallocate network for instance. 
[ 1315.196235] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1315.196571] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1315.196820] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Deleting the datastore file [datastore1] cb7a19f1-6093-47ee-bbbc-a75dd5423f32 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1315.196931] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-128fb568-bff1-45b7-9c54-45257dd75651 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.203706] env[62522]: DEBUG oslo_vmware.api [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for the task: (returnval){ [ 1315.203706] env[62522]: value = "task-2416495" [ 1315.203706] env[62522]: _type = "Task" [ 1315.203706] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.211381] env[62522]: DEBUG oslo_vmware.api [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416495, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.569203] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1315.569474] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1315.569694] env[62522]: DEBUG nova.objects.instance [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lazy-loading 'resources' on Instance uuid a4cb5c19-9087-4354-9689-a99ae8924dc1 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1315.713841] env[62522]: DEBUG oslo_vmware.api [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Task: {'id': task-2416495, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.360602} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.714032] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1315.714218] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1315.714390] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1315.714576] env[62522]: INFO nova.compute.manager [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Took 1.51 seconds to destroy the instance on the hypervisor. [ 1315.714808] env[62522]: DEBUG oslo.service.loopingcall [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1315.714993] env[62522]: DEBUG nova.compute.manager [-] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1315.715099] env[62522]: DEBUG nova.network.neutron [-] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1316.145868] env[62522]: DEBUG nova.compute.manager [req-1efda81a-980d-4b89-9274-e182d6072676 req-2e3d3a94-b905-4ad4-bde1-c78ad0be9659 service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Received event network-vif-deleted-33665d0f-b7dd-4d62-86d5-8ccb8f178e97 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1316.146070] env[62522]: INFO nova.compute.manager [req-1efda81a-980d-4b89-9274-e182d6072676 req-2e3d3a94-b905-4ad4-bde1-c78ad0be9659 service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Neutron deleted interface 33665d0f-b7dd-4d62-86d5-8ccb8f178e97; detaching it from the instance and deleting it from the info cache [ 1316.146265] env[62522]: DEBUG nova.network.neutron [req-1efda81a-980d-4b89-9274-e182d6072676 req-2e3d3a94-b905-4ad4-bde1-c78ad0be9659 service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1316.172027] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6c816f9-5206-4200-9df4-50f914be7946 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.180963] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f2a9d13-02a5-4373-bd33-47d4a660ac2e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.210687] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a5ff973-9a5a-420f-a443-1c5902ca7d01 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.218460] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-201efa6b-39c4-4601-b96f-1d097fdeb101 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.232803] env[62522]: DEBUG nova.compute.provider_tree [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1316.624957] env[62522]: DEBUG nova.network.neutron [-] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1316.649480] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-54be870e-be0f-42b3-8196-7630a95fd857 {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.659594] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e9e0f01-200e-433f-82b5-3dd84f5eeac5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.687418] env[62522]: DEBUG nova.compute.manager [req-1efda81a-980d-4b89-9274-e182d6072676 req-2e3d3a94-b905-4ad4-bde1-c78ad0be9659 service nova] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Detach interface failed, port_id=33665d0f-b7dd-4d62-86d5-8ccb8f178e97, reason: Instance cb7a19f1-6093-47ee-bbbc-a75dd5423f32 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1316.737026] env[62522]: DEBUG nova.scheduler.client.report [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1317.127525] env[62522]: INFO nova.compute.manager [-] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Took 1.41 seconds to deallocate network for instance. [ 1317.241411] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.672s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1317.261443] env[62522]: INFO nova.scheduler.client.report [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Deleted allocations for instance a4cb5c19-9087-4354-9689-a99ae8924dc1 [ 1317.635221] env[62522]: DEBUG oslo_concurrency.lockutils [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1317.635516] env[62522]: DEBUG oslo_concurrency.lockutils [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1317.635819] env[62522]: DEBUG nova.objects.instance [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lazy-loading 'resources' on Instance uuid 
cb7a19f1-6093-47ee-bbbc-a75dd5423f32 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1317.768440] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eb7fd789-e4be-4243-9eba-1a816bcdf9fa tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "a4cb5c19-9087-4354-9689-a99ae8924dc1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.768s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1318.207537] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e55699c2-94e1-475e-bb9b-c870a1ea2e57 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.216036] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-480f4697-585d-49af-8d8e-491ef2b43e8d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.246216] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de631154-da65-4a62-8755-9431188c8b54 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.253260] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c848a0f-32fc-4eaf-8670-db791df4d5e6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.266022] env[62522]: DEBUG nova.compute.provider_tree [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1318.770550] env[62522]: DEBUG nova.scheduler.client.report [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1319.274633] env[62522]: DEBUG oslo_concurrency.lockutils [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.639s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.297238] env[62522]: INFO nova.scheduler.client.report [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Deleted allocations for instance cb7a19f1-6093-47ee-bbbc-a75dd5423f32 [ 
1319.804815] env[62522]: DEBUG oslo_concurrency.lockutils [None req-91f6f4de-f089-4e8d-a65d-9ad7f8e56e86 tempest-AttachVolumeShelveTestJSON-438713565 tempest-AttachVolumeShelveTestJSON-438713565-project-member] Lock "cb7a19f1-6093-47ee-bbbc-a75dd5423f32" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.104s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.957231] env[62522]: DEBUG oslo_concurrency.lockutils [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "ecc70761-8f69-48f6-8e81-7d2ba3728c70" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1319.957231] env[62522]: DEBUG oslo_concurrency.lockutils [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "ecc70761-8f69-48f6-8e81-7d2ba3728c70" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1320.376714] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "da11bae6-484b-455e-9462-6f5143d2a9a9" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1320.377037] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "da11bae6-484b-455e-9462-6f5143d2a9a9" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1320.377194] env[62522]: INFO nova.compute.manager [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Shelving [ 1320.459081] env[62522]: DEBUG nova.compute.manager [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1320.983283] env[62522]: DEBUG oslo_concurrency.lockutils [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1320.983715] env[62522]: DEBUG oslo_concurrency.lockutils [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1320.986186] env[62522]: INFO nova.compute.claims [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1321.387068] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1321.387470] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4ca346f2-d662-452d-b059-f8423ac1b0f8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.395805] env[62522]: DEBUG oslo_vmware.api [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1321.395805] env[62522]: value = "task-2416497" [ 1321.395805] env[62522]: _type = "Task" [ 1321.395805] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.404315] env[62522]: DEBUG oslo_vmware.api [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416497, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.905892] env[62522]: DEBUG oslo_vmware.api [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416497, 'name': PowerOffVM_Task, 'duration_secs': 0.165163} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.906210] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1321.907116] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b8cc8e2-7042-460d-9793-33df49174581 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.924482] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f42ad3ca-0291-4d51-88e5-4cb1f7afbfc5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.063367] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eda4d75-66dc-42de-8b6d-deefc457a243 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.070698] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14779b71-b099-4508-b51f-8ac195403f64 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.101394] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1f1b409-3da0-4d7e-962d-4f5146eaac05 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.108358] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e06a9f7-9880-4509-9981-6a9feef6e2a6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.121149] env[62522]: DEBUG nova.compute.provider_tree [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1322.435064] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Creating Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1322.435064] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e77111fb-b52b-43f8-b200-8c1fb034edd4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.443118] env[62522]: DEBUG oslo_vmware.api [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1322.443118] env[62522]: value = "task-2416498" [ 1322.443118] env[62522]: _type = "Task" [ 1322.443118] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.451347] env[62522]: DEBUG oslo_vmware.api [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416498, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.624749] env[62522]: DEBUG nova.scheduler.client.report [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1322.638971] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0b56c3da-f6d8-443d-9817-6f3f67298d25 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "7406a1a4-a342-475b-ad02-6a29f7c487ee" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1322.639228] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0b56c3da-f6d8-443d-9817-6f3f67298d25 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "7406a1a4-a342-475b-ad02-6a29f7c487ee" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1322.953107] env[62522]: DEBUG oslo_vmware.api [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416498, 'name': CreateSnapshot_Task, 'duration_secs': 0.381672} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.953401] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Created Snapshot of the VM instance {{(pid=62522) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1322.954155] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46ddfaa9-749a-439b-9a80-c7a88f46752f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.129490] env[62522]: DEBUG oslo_concurrency.lockutils [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.146s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1323.130030] env[62522]: DEBUG nova.compute.manager [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1323.141613] env[62522]: INFO nova.compute.manager [None req-0b56c3da-f6d8-443d-9817-6f3f67298d25 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Detaching volume da41f036-456a-409e-a359-6157800d323c [ 1323.171139] env[62522]: INFO nova.virt.block_device [None req-0b56c3da-f6d8-443d-9817-6f3f67298d25 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Attempting to driver detach volume da41f036-456a-409e-a359-6157800d323c from mountpoint /dev/sdb [ 1323.171388] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b56c3da-f6d8-443d-9817-6f3f67298d25 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Volume detach. 
Driver type: vmdk {{(pid=62522) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1323.171576] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b56c3da-f6d8-443d-9817-6f3f67298d25 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489869', 'volume_id': 'da41f036-456a-409e-a359-6157800d323c', 'name': 'volume-da41f036-456a-409e-a359-6157800d323c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7406a1a4-a342-475b-ad02-6a29f7c487ee', 'attached_at': '', 'detached_at': '', 'volume_id': 'da41f036-456a-409e-a359-6157800d323c', 'serial': 'da41f036-456a-409e-a359-6157800d323c'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1323.172994] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca63d5c4-f7b1-456d-afe7-357f46c918ba {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.195054] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7872792b-8935-4997-ab98-b486ab962332 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.201515] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-592ba8bb-2483-426f-a7ac-1aaaab7da432 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.220580] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f2bce4-efd4-4584-88c1-35531f06486c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.234059] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b56c3da-f6d8-443d-9817-6f3f67298d25 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] The volume has not been displaced from its original location: [datastore1] volume-da41f036-456a-409e-a359-6157800d323c/volume-da41f036-456a-409e-a359-6157800d323c.vmdk. No consolidation needed. 
{{(pid=62522) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1323.239213] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b56c3da-f6d8-443d-9817-6f3f67298d25 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Reconfiguring VM instance instance-0000006d to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1323.239461] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea259404-91bc-4a3c-a309-f1fe1ce303f2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.256206] env[62522]: DEBUG oslo_vmware.api [None req-0b56c3da-f6d8-443d-9817-6f3f67298d25 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1323.256206] env[62522]: value = "task-2416499" [ 1323.256206] env[62522]: _type = "Task" [ 1323.256206] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.263373] env[62522]: DEBUG oslo_vmware.api [None req-0b56c3da-f6d8-443d-9817-6f3f67298d25 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416499, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.472096] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Creating linked-clone VM from snapshot {{(pid=62522) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1323.472096] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-39ecce4f-c2b3-4da7-90d6-a8d76badde76 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.479570] env[62522]: DEBUG oslo_vmware.api [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1323.479570] env[62522]: value = "task-2416500" [ 1323.479570] env[62522]: _type = "Task" [ 1323.479570] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.487453] env[62522]: DEBUG oslo_vmware.api [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416500, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.635550] env[62522]: DEBUG nova.compute.utils [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1323.637212] env[62522]: DEBUG nova.compute.manager [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1323.637391] env[62522]: DEBUG nova.network.neutron [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1323.687246] env[62522]: DEBUG nova.policy [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'edec975faaef4f2ba31aa0de30590522', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fa792663b4ac41b7bf4c5e4b290f9b86', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1323.766931] env[62522]: DEBUG oslo_vmware.api [None req-0b56c3da-f6d8-443d-9817-6f3f67298d25 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416499, 'name': ReconfigVM_Task, 'duration_secs': 0.233234} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.767227] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b56c3da-f6d8-443d-9817-6f3f67298d25 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Reconfigured VM instance instance-0000006d to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1323.771736] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f7e009a-4d08-43b8-80d6-e15003ef99dd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.787915] env[62522]: DEBUG oslo_vmware.api [None req-0b56c3da-f6d8-443d-9817-6f3f67298d25 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1323.787915] env[62522]: value = "task-2416501" [ 1323.787915] env[62522]: _type = "Task" [ 1323.787915] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.796237] env[62522]: DEBUG oslo_vmware.api [None req-0b56c3da-f6d8-443d-9817-6f3f67298d25 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416501, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.989506] env[62522]: DEBUG oslo_vmware.api [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416500, 'name': CloneVM_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.144955] env[62522]: DEBUG nova.compute.manager [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1324.173121] env[62522]: DEBUG nova.network.neutron [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Successfully created port: 0ae0220d-f10b-4927-b124-35afaa7bc701 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1324.296760] env[62522]: DEBUG oslo_vmware.api [None req-0b56c3da-f6d8-443d-9817-6f3f67298d25 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416501, 'name': ReconfigVM_Task, 'duration_secs': 0.263071} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.297185] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b56c3da-f6d8-443d-9817-6f3f67298d25 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489869', 'volume_id': 'da41f036-456a-409e-a359-6157800d323c', 'name': 'volume-da41f036-456a-409e-a359-6157800d323c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7406a1a4-a342-475b-ad02-6a29f7c487ee', 'attached_at': '', 'detached_at': '', 'volume_id': 'da41f036-456a-409e-a359-6157800d323c', 'serial': 'da41f036-456a-409e-a359-6157800d323c'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1324.490870] env[62522]: DEBUG oslo_vmware.api [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416500, 'name': CloneVM_Task, 'duration_secs': 0.808877} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.491307] env[62522]: INFO nova.virt.vmwareapi.vmops [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Created linked-clone VM from snapshot [ 1324.491923] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-216a5521-50d4-4540-ac5e-b1485d4ba526 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.499143] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Uploading image 414dd827-3fec-484e-897f-aa53cdec2e35 {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1324.523385] env[62522]: DEBUG oslo_vmware.rw_handles [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1324.523385] env[62522]: value = "vm-489875" [ 1324.523385] env[62522]: _type = "VirtualMachine" [ 1324.523385] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1324.523647] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-42547512-252c-4dcc-a65b-3062b74c004e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.530652] env[62522]: DEBUG oslo_vmware.rw_handles [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lease: (returnval){ [ 1324.530652] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52706d7b-23f4-d21e-c63d-d0cf9ba9b324" [ 1324.530652] env[62522]: _type = "HttpNfcLease" [ 1324.530652] env[62522]: } obtained for exporting VM: (result){ [ 1324.530652] env[62522]: value = "vm-489875" [ 1324.530652] env[62522]: _type = "VirtualMachine" [ 1324.530652] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1324.531039] env[62522]: DEBUG oslo_vmware.api [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the lease: (returnval){ [ 1324.531039] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52706d7b-23f4-d21e-c63d-d0cf9ba9b324" [ 1324.531039] env[62522]: _type = "HttpNfcLease" [ 1324.531039] env[62522]: } to be ready. {{(pid=62522) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1324.536864] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1324.536864] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52706d7b-23f4-d21e-c63d-d0cf9ba9b324" [ 1324.536864] env[62522]: _type = "HttpNfcLease" [ 1324.536864] env[62522]: } is initializing. 
{{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1324.844916] env[62522]: DEBUG nova.objects.instance [None req-0b56c3da-f6d8-443d-9817-6f3f67298d25 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lazy-loading 'flavor' on Instance uuid 7406a1a4-a342-475b-ad02-6a29f7c487ee {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1325.042382] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1325.042382] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52706d7b-23f4-d21e-c63d-d0cf9ba9b324" [ 1325.042382] env[62522]: _type = "HttpNfcLease" [ 1325.042382] env[62522]: } is ready. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1325.042703] env[62522]: DEBUG oslo_vmware.rw_handles [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1325.042703] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52706d7b-23f4-d21e-c63d-d0cf9ba9b324" [ 1325.042703] env[62522]: _type = "HttpNfcLease" [ 1325.042703] env[62522]: }. {{(pid=62522) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1325.043441] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaabe331-0191-4a97-99b2-10fa5f1f0582 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.050522] env[62522]: DEBUG oslo_vmware.rw_handles [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c64d01-b250-0528-b12c-af5d70d7495a/disk-0.vmdk from lease info. {{(pid=62522) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1325.050693] env[62522]: DEBUG oslo_vmware.rw_handles [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c64d01-b250-0528-b12c-af5d70d7495a/disk-0.vmdk for reading. {{(pid=62522) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1325.151621] env[62522]: DEBUG nova.compute.manager [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1325.177598] env[62522]: DEBUG nova.virt.hardware [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1325.177841] env[62522]: DEBUG nova.virt.hardware [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1325.177999] env[62522]: DEBUG nova.virt.hardware [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1325.178293] env[62522]: DEBUG nova.virt.hardware [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1325.178447] env[62522]: DEBUG nova.virt.hardware [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1325.178597] env[62522]: DEBUG nova.virt.hardware [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1325.178807] env[62522]: DEBUG nova.virt.hardware [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1325.178967] env[62522]: DEBUG nova.virt.hardware [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1325.179153] 
env[62522]: DEBUG nova.virt.hardware [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1325.179319] env[62522]: DEBUG nova.virt.hardware [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1325.179492] env[62522]: DEBUG nova.virt.hardware [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1325.180358] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd7a740f-8041-41ee-ad97-c0157254962b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.189177] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22569dbe-3fec-4578-8a64-6ebba61403e6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.210126] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3ac30562-9dfc-4feb-9903-e4cf6b1b2b47 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.665819] env[62522]: DEBUG nova.compute.manager [req-84289d73-f0e8-4fda-9eb6-d438dd268648 req-7ecfaa6a-69e3-4158-9e82-04340240d3a8 service nova] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Received event network-vif-plugged-0ae0220d-f10b-4927-b124-35afaa7bc701 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1325.666578] env[62522]: DEBUG oslo_concurrency.lockutils [req-84289d73-f0e8-4fda-9eb6-d438dd268648 req-7ecfaa6a-69e3-4158-9e82-04340240d3a8 service nova] Acquiring lock "ecc70761-8f69-48f6-8e81-7d2ba3728c70-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1325.666954] env[62522]: DEBUG oslo_concurrency.lockutils [req-84289d73-f0e8-4fda-9eb6-d438dd268648 req-7ecfaa6a-69e3-4158-9e82-04340240d3a8 service nova] Lock "ecc70761-8f69-48f6-8e81-7d2ba3728c70-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1325.667162] env[62522]: DEBUG oslo_concurrency.lockutils [req-84289d73-f0e8-4fda-9eb6-d438dd268648 req-7ecfaa6a-69e3-4158-9e82-04340240d3a8 service nova] Lock "ecc70761-8f69-48f6-8e81-7d2ba3728c70-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1325.667529] env[62522]: DEBUG nova.compute.manager [req-84289d73-f0e8-4fda-9eb6-d438dd268648 req-7ecfaa6a-69e3-4158-9e82-04340240d3a8 
service nova] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] No waiting events found dispatching network-vif-plugged-0ae0220d-f10b-4927-b124-35afaa7bc701 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1325.667739] env[62522]: WARNING nova.compute.manager [req-84289d73-f0e8-4fda-9eb6-d438dd268648 req-7ecfaa6a-69e3-4158-9e82-04340240d3a8 service nova] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Received unexpected event network-vif-plugged-0ae0220d-f10b-4927-b124-35afaa7bc701 for instance with vm_state building and task_state spawning. [ 1325.781484] env[62522]: DEBUG nova.network.neutron [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Successfully updated port: 0ae0220d-f10b-4927-b124-35afaa7bc701 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1325.855055] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0b56c3da-f6d8-443d-9817-6f3f67298d25 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "7406a1a4-a342-475b-ad02-6a29f7c487ee" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.215s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1326.289714] env[62522]: DEBUG oslo_concurrency.lockutils [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "refresh_cache-ecc70761-8f69-48f6-8e81-7d2ba3728c70" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1326.290024] env[62522]: DEBUG oslo_concurrency.lockutils [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquired lock "refresh_cache-ecc70761-8f69-48f6-8e81-7d2ba3728c70" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1326.290097] env[62522]: DEBUG nova.network.neutron [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1326.821152] env[62522]: DEBUG nova.network.neutron [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1326.929188] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "7406a1a4-a342-475b-ad02-6a29f7c487ee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1326.929188] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "7406a1a4-a342-475b-ad02-6a29f7c487ee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1326.929427] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "7406a1a4-a342-475b-ad02-6a29f7c487ee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1326.929617] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "7406a1a4-a342-475b-ad02-6a29f7c487ee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1326.929792] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "7406a1a4-a342-475b-ad02-6a29f7c487ee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1326.932264] env[62522]: INFO nova.compute.manager [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Terminating instance [ 1327.014240] env[62522]: DEBUG nova.network.neutron [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Updating instance_info_cache with network_info: [{"id": "0ae0220d-f10b-4927-b124-35afaa7bc701", "address": "fa:16:3e:8b:a8:5b", "network": {"id": "2c9c537f-91b6-4217-8eaf-dc187f4ce7d5", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1154766161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "fa792663b4ac41b7bf4c5e4b290f9b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ae0220d-f1", "ovs_interfaceid": "0ae0220d-f10b-4927-b124-35afaa7bc701", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1327.439449] env[62522]: DEBUG nova.compute.manager [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1327.439696] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1327.440638] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0317f58-62f9-41e6-b0f4-fc167619b1df {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.450134] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1327.450427] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d00a3d5b-ae86-42db-9e10-ccd81aabb760 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.458943] env[62522]: DEBUG oslo_vmware.api [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1327.458943] env[62522]: value = "task-2416503" [ 1327.458943] env[62522]: _type = "Task" [ 1327.458943] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.467259] env[62522]: DEBUG oslo_vmware.api [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416503, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.517314] env[62522]: DEBUG oslo_concurrency.lockutils [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Releasing lock "refresh_cache-ecc70761-8f69-48f6-8e81-7d2ba3728c70" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1327.517657] env[62522]: DEBUG nova.compute.manager [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Instance network_info: |[{"id": "0ae0220d-f10b-4927-b124-35afaa7bc701", "address": "fa:16:3e:8b:a8:5b", "network": {"id": "2c9c537f-91b6-4217-8eaf-dc187f4ce7d5", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1154766161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fa792663b4ac41b7bf4c5e4b290f9b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ae0220d-f1", "ovs_interfaceid": "0ae0220d-f10b-4927-b124-35afaa7bc701", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1327.518191] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:a8:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0ae0220d-f10b-4927-b124-35afaa7bc701', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1327.526040] env[62522]: DEBUG oslo.service.loopingcall [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1327.526293] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1327.526574] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-627c2bd0-250c-4869-a548-cc8f764ae369 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.548640] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1327.548640] env[62522]: value = "task-2416504" [ 1327.548640] env[62522]: _type = "Task" [ 1327.548640] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.558521] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416504, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.701316] env[62522]: DEBUG nova.compute.manager [req-894ad2a3-9b40-4ce4-8d7c-9a49a9b2200d req-d378b446-de2e-43be-a06b-e8c6f50a0793 service nova] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Received event network-changed-0ae0220d-f10b-4927-b124-35afaa7bc701 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1327.701518] env[62522]: DEBUG nova.compute.manager [req-894ad2a3-9b40-4ce4-8d7c-9a49a9b2200d req-d378b446-de2e-43be-a06b-e8c6f50a0793 service nova] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Refreshing instance network info cache due to event network-changed-0ae0220d-f10b-4927-b124-35afaa7bc701. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1327.701788] env[62522]: DEBUG oslo_concurrency.lockutils [req-894ad2a3-9b40-4ce4-8d7c-9a49a9b2200d req-d378b446-de2e-43be-a06b-e8c6f50a0793 service nova] Acquiring lock "refresh_cache-ecc70761-8f69-48f6-8e81-7d2ba3728c70" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1327.701940] env[62522]: DEBUG oslo_concurrency.lockutils [req-894ad2a3-9b40-4ce4-8d7c-9a49a9b2200d req-d378b446-de2e-43be-a06b-e8c6f50a0793 service nova] Acquired lock "refresh_cache-ecc70761-8f69-48f6-8e81-7d2ba3728c70" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1327.702131] env[62522]: DEBUG nova.network.neutron [req-894ad2a3-9b40-4ce4-8d7c-9a49a9b2200d req-d378b446-de2e-43be-a06b-e8c6f50a0793 service nova] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Refreshing network info cache for port 0ae0220d-f10b-4927-b124-35afaa7bc701 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1327.968423] env[62522]: DEBUG oslo_vmware.api [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416503, 'name': PowerOffVM_Task, 'duration_secs': 0.245732} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.968818] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1327.968880] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1327.969131] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5b535c22-19d0-4af7-b621-94e696acd807 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.034137] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1328.034137] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1328.034137] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Deleting the datastore file [datastore1] 7406a1a4-a342-475b-ad02-6a29f7c487ee {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1328.034137] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a4a05c18-d068-4714-a0ef-bc8e86a70a2d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.041642] env[62522]: DEBUG oslo_vmware.api [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1328.041642] env[62522]: value = "task-2416506" [ 1328.041642] env[62522]: _type = "Task" [ 1328.041642] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.049316] env[62522]: DEBUG oslo_vmware.api [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416506, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.057638] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416504, 'name': CreateVM_Task, 'duration_secs': 0.330081} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.057866] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1328.058636] env[62522]: DEBUG oslo_concurrency.lockutils [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1328.058850] env[62522]: DEBUG oslo_concurrency.lockutils [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1328.059231] env[62522]: DEBUG oslo_concurrency.lockutils [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1328.059525] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d9edf0d-eab8-410d-9187-7117111b4b5b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.064193] env[62522]: DEBUG oslo_vmware.api [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1328.064193] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52dfd68e-0d38-c32e-9e3a-7ab9fb7195b8" [ 1328.064193] env[62522]: _type = "Task" [ 1328.064193] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.073372] env[62522]: DEBUG oslo_vmware.api [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52dfd68e-0d38-c32e-9e3a-7ab9fb7195b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.526056] env[62522]: DEBUG nova.network.neutron [req-894ad2a3-9b40-4ce4-8d7c-9a49a9b2200d req-d378b446-de2e-43be-a06b-e8c6f50a0793 service nova] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Updated VIF entry in instance network info cache for port 0ae0220d-f10b-4927-b124-35afaa7bc701. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1328.526427] env[62522]: DEBUG nova.network.neutron [req-894ad2a3-9b40-4ce4-8d7c-9a49a9b2200d req-d378b446-de2e-43be-a06b-e8c6f50a0793 service nova] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Updating instance_info_cache with network_info: [{"id": "0ae0220d-f10b-4927-b124-35afaa7bc701", "address": "fa:16:3e:8b:a8:5b", "network": {"id": "2c9c537f-91b6-4217-8eaf-dc187f4ce7d5", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1154766161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fa792663b4ac41b7bf4c5e4b290f9b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ae0220d-f1", "ovs_interfaceid": "0ae0220d-f10b-4927-b124-35afaa7bc701", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1328.553687] env[62522]: DEBUG oslo_vmware.api [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416506, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171895} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.554148] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1328.554211] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1328.554420] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1328.554601] env[62522]: INFO nova.compute.manager [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 1328.554878] env[62522]: DEBUG oslo.service.loopingcall [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1328.555094] env[62522]: DEBUG nova.compute.manager [-] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1328.555195] env[62522]: DEBUG nova.network.neutron [-] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1328.574732] env[62522]: DEBUG oslo_vmware.api [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52dfd68e-0d38-c32e-9e3a-7ab9fb7195b8, 'name': SearchDatastore_Task, 'duration_secs': 0.009595} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.575080] env[62522]: DEBUG oslo_concurrency.lockutils [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1328.575337] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1328.575582] env[62522]: DEBUG oslo_concurrency.lockutils [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1328.575754] env[62522]: DEBUG oslo_concurrency.lockutils [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1328.575951] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1328.576237] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-2ca27d28-a789-492a-a4c5-d3df62b41ab3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.584943] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1328.585655] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1328.585883] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a2995c2-1e1c-4980-abf4-7ec655a80cb4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.593182] env[62522]: DEBUG oslo_vmware.api [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1328.593182] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527dc86f-203e-4c66-9a2b-8e1542b5d8d6" [ 1328.593182] env[62522]: _type = "Task" [ 1328.593182] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.602442] env[62522]: DEBUG oslo_vmware.api [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527dc86f-203e-4c66-9a2b-8e1542b5d8d6, 'name': SearchDatastore_Task, 'duration_secs': 0.008696} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.603230] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0399db52-e46c-40a8-9793-71b6a8308c74 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.608363] env[62522]: DEBUG oslo_vmware.api [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1328.608363] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52483c7f-9fdc-b990-98e4-a2e806e7a142" [ 1328.608363] env[62522]: _type = "Task" [ 1328.608363] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.616177] env[62522]: DEBUG oslo_vmware.api [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52483c7f-9fdc-b990-98e4-a2e806e7a142, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.029067] env[62522]: DEBUG oslo_concurrency.lockutils [req-894ad2a3-9b40-4ce4-8d7c-9a49a9b2200d req-d378b446-de2e-43be-a06b-e8c6f50a0793 service nova] Releasing lock "refresh_cache-ecc70761-8f69-48f6-8e81-7d2ba3728c70" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1329.118139] env[62522]: DEBUG oslo_vmware.api [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52483c7f-9fdc-b990-98e4-a2e806e7a142, 'name': SearchDatastore_Task, 'duration_secs': 0.008114} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.118405] env[62522]: DEBUG oslo_concurrency.lockutils [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1329.118654] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] ecc70761-8f69-48f6-8e81-7d2ba3728c70/ecc70761-8f69-48f6-8e81-7d2ba3728c70.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1329.118905] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ad5c552-f77b-42c0-a530-22242a9e5fdb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.126671] env[62522]: DEBUG oslo_vmware.api [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1329.126671] env[62522]: value = "task-2416507" [ 1329.126671] env[62522]: _type = "Task" [ 1329.126671] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.134352] env[62522]: DEBUG oslo_vmware.api [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416507, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.217664] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "fd9af7c3-358e-417f-97f4-fd2d67d21300" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1329.217944] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "fd9af7c3-358e-417f-97f4-fd2d67d21300" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1329.589078] env[62522]: DEBUG nova.network.neutron [-] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1329.637513] env[62522]: DEBUG oslo_vmware.api [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416507, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.430643} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.637821] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] ecc70761-8f69-48f6-8e81-7d2ba3728c70/ecc70761-8f69-48f6-8e81-7d2ba3728c70.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1329.638022] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1329.638272] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-01d8af6a-591f-4b19-bbf8-9d9aa2623a89 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.645887] env[62522]: DEBUG oslo_vmware.api [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1329.645887] env[62522]: value = "task-2416508" [ 1329.645887] env[62522]: _type = "Task" [ 1329.645887] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.654024] env[62522]: DEBUG oslo_vmware.api [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416508, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.720584] env[62522]: DEBUG nova.compute.manager [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1329.732624] env[62522]: DEBUG nova.compute.manager [req-31dc7141-4705-4185-b42f-5cacbf5a0451 req-f5e50835-b654-4229-a758-b28079d4f8e4 service nova] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Received event network-vif-deleted-1f2dff01-fe9d-46ea-af42-ec9d20c5ac2e {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1330.092117] env[62522]: INFO nova.compute.manager [-] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Took 1.54 seconds to deallocate network for instance. [ 1330.156592] env[62522]: DEBUG oslo_vmware.api [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416508, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071004} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.156859] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1330.157939] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35432298-881f-452b-9f8e-9a4d329abaa7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.180056] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] ecc70761-8f69-48f6-8e81-7d2ba3728c70/ecc70761-8f69-48f6-8e81-7d2ba3728c70.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1330.180368] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-761227df-e7c4-4aa1-9f70-352ea25c4fec {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.200121] env[62522]: DEBUG oslo_vmware.api [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1330.200121] env[62522]: value = "task-2416509" [ 1330.200121] env[62522]: _type = "Task" [ 1330.200121] 
env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.208334] env[62522]: DEBUG oslo_vmware.api [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416509, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.244765] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1330.245090] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1330.246847] env[62522]: INFO nova.compute.claims [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1330.598992] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1330.711385] env[62522]: DEBUG oslo_vmware.api [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416509, 'name': ReconfigVM_Task, 'duration_secs': 0.280912} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.711572] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Reconfigured VM instance instance-00000072 to attach disk [datastore2] ecc70761-8f69-48f6-8e81-7d2ba3728c70/ecc70761-8f69-48f6-8e81-7d2ba3728c70.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1330.712175] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-247a4739-f5be-49e9-a1e7-751ad380147a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.719525] env[62522]: DEBUG oslo_vmware.api [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1330.719525] env[62522]: value = "task-2416510" [ 1330.719525] env[62522]: _type = "Task" [ 1330.719525] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.727658] env[62522]: DEBUG oslo_vmware.api [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416510, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.229755] env[62522]: DEBUG oslo_vmware.api [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416510, 'name': Rename_Task, 'duration_secs': 0.237481} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.230099] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1331.230295] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7a5b014b-8298-4180-91b4-f411d86f68f5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.237783] env[62522]: DEBUG oslo_vmware.api [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1331.237783] env[62522]: value = "task-2416511" [ 1331.237783] env[62522]: _type = "Task" [ 1331.237783] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.245062] env[62522]: DEBUG oslo_vmware.api [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416511, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.356021] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0c6506f-56ca-4f2a-bbad-2e7e17a4bd15 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.363332] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c7886f2-e8f8-4636-9301-bedd764f0ecd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.401330] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d809054c-a9ff-4548-bc66-20650e44b5da {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.411612] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1b4ba5a-9410-4b63-89ae-e5f2e9518915 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.427971] env[62522]: DEBUG nova.compute.provider_tree [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1331.747943] env[62522]: DEBUG oslo_vmware.api [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416511, 'name': PowerOnVM_Task, 'duration_secs': 0.464432} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.748177] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1331.748359] env[62522]: INFO nova.compute.manager [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Took 6.60 seconds to spawn the instance on the hypervisor. 
[ 1331.748540] env[62522]: DEBUG nova.compute.manager [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1331.749383] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e6dd2aa-09ad-4335-b14a-76679882cdb4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.931798] env[62522]: DEBUG nova.scheduler.client.report [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1332.269027] env[62522]: INFO nova.compute.manager [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Took 11.30 seconds to build instance. [ 1332.436352] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.191s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.436904] env[62522]: DEBUG nova.compute.manager [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1332.439757] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.841s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1332.440077] env[62522]: DEBUG nova.objects.instance [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lazy-loading 'resources' on Instance uuid 7406a1a4-a342-475b-ad02-6a29f7c487ee {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1332.519450] env[62522]: DEBUG oslo_vmware.rw_handles [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c64d01-b250-0528-b12c-af5d70d7495a/disk-0.vmdk. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1332.520501] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d50d8a60-4e76-4d11-8ff9-d24628cd929d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.526793] env[62522]: DEBUG oslo_vmware.rw_handles [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c64d01-b250-0528-b12c-af5d70d7495a/disk-0.vmdk is in state: ready. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1332.526976] env[62522]: ERROR oslo_vmware.rw_handles [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c64d01-b250-0528-b12c-af5d70d7495a/disk-0.vmdk due to incomplete transfer. [ 1332.527207] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-79c3467b-6278-4bee-af0b-5ca18d450091 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.534965] env[62522]: DEBUG oslo_vmware.rw_handles [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c64d01-b250-0528-b12c-af5d70d7495a/disk-0.vmdk. 
{{(pid=62522) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1332.535171] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Uploaded image 414dd827-3fec-484e-897f-aa53cdec2e35 to the Glance image server {{(pid=62522) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1332.537423] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Destroying the VM {{(pid=62522) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1332.537926] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-44e1ba9d-1993-4c85-bd21-0c1168eb4234 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.543768] env[62522]: DEBUG oslo_vmware.api [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1332.543768] env[62522]: value = "task-2416512" [ 1332.543768] env[62522]: _type = "Task" [ 1332.543768] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.551722] env[62522]: DEBUG oslo_vmware.api [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416512, 'name': Destroy_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.771697] env[62522]: DEBUG oslo_concurrency.lockutils [None req-99734820-9932-4d32-b5e2-4879dc63c763 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "ecc70761-8f69-48f6-8e81-7d2ba3728c70" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.815s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.943811] env[62522]: DEBUG nova.compute.utils [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1332.948151] env[62522]: DEBUG nova.compute.manager [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1332.948393] env[62522]: DEBUG nova.network.neutron [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1333.028996] env[62522]: DEBUG nova.policy [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '607183068c444260afbec94a63fde1d4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bab9d5d3c27d4c218b88e4a029300a66', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1333.031934] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea5a037-faf2-419e-8a3d-5084180905ce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.039485] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70fc8724-6e19-40c8-914f-728cf27de557 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.073191] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5780c1fd-610d-4531-a617-53e4442efac6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.078400] env[62522]: DEBUG oslo_vmware.api [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416512, 'name': Destroy_Task, 'duration_secs': 0.3655} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.078998] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Destroyed the VM [ 1333.079296] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Deleting Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1333.079547] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-bbb7491e-7e61-42ed-ad2c-aa35ab78c945 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.084450] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab03cd58-42ed-414c-9518-acc83b9b6088 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.089290] env[62522]: DEBUG oslo_vmware.api [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1333.089290] env[62522]: value = "task-2416513" [ 1333.089290] env[62522]: _type = "Task" [ 1333.089290] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.101883] env[62522]: DEBUG nova.compute.provider_tree [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1333.108610] env[62522]: DEBUG oslo_vmware.api [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416513, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.190217] env[62522]: DEBUG nova.compute.manager [req-fc06cdba-9783-4efa-a113-581133829ab6 req-785d613b-4d23-41d3-9c1f-195633215a8a service nova] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Received event network-changed-0ae0220d-f10b-4927-b124-35afaa7bc701 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1333.190416] env[62522]: DEBUG nova.compute.manager [req-fc06cdba-9783-4efa-a113-581133829ab6 req-785d613b-4d23-41d3-9c1f-195633215a8a service nova] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Refreshing instance network info cache due to event network-changed-0ae0220d-f10b-4927-b124-35afaa7bc701. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1333.190631] env[62522]: DEBUG oslo_concurrency.lockutils [req-fc06cdba-9783-4efa-a113-581133829ab6 req-785d613b-4d23-41d3-9c1f-195633215a8a service nova] Acquiring lock "refresh_cache-ecc70761-8f69-48f6-8e81-7d2ba3728c70" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1333.190774] env[62522]: DEBUG oslo_concurrency.lockutils [req-fc06cdba-9783-4efa-a113-581133829ab6 req-785d613b-4d23-41d3-9c1f-195633215a8a service nova] Acquired lock "refresh_cache-ecc70761-8f69-48f6-8e81-7d2ba3728c70" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1333.190938] env[62522]: DEBUG nova.network.neutron [req-fc06cdba-9783-4efa-a113-581133829ab6 req-785d613b-4d23-41d3-9c1f-195633215a8a service nova] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Refreshing network info cache for port 0ae0220d-f10b-4927-b124-35afaa7bc701 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1333.395411] env[62522]: DEBUG nova.network.neutron [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Successfully created port: 2c6f1904-1976-45eb-9380-5262c08450d2 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1333.449106] env[62522]: DEBUG nova.compute.manager [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1333.599495] env[62522]: DEBUG oslo_vmware.api [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416513, 'name': RemoveSnapshot_Task, 'duration_secs': 0.380973} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.599762] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Deleted Snapshot of the VM instance {{(pid=62522) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1333.600082] env[62522]: DEBUG nova.compute.manager [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1333.600830] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61cc8251-12cf-41f6-ac27-d699bced7891 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.605402] env[62522]: DEBUG nova.scheduler.client.report [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1333.902239] env[62522]: DEBUG nova.network.neutron [req-fc06cdba-9783-4efa-a113-581133829ab6 req-785d613b-4d23-41d3-9c1f-195633215a8a service nova] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Updated VIF entry in instance network info cache for port 0ae0220d-f10b-4927-b124-35afaa7bc701. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1333.902611] env[62522]: DEBUG nova.network.neutron [req-fc06cdba-9783-4efa-a113-581133829ab6 req-785d613b-4d23-41d3-9c1f-195633215a8a service nova] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Updating instance_info_cache with network_info: [{"id": "0ae0220d-f10b-4927-b124-35afaa7bc701", "address": "fa:16:3e:8b:a8:5b", "network": {"id": "2c9c537f-91b6-4217-8eaf-dc187f4ce7d5", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1154766161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fa792663b4ac41b7bf4c5e4b290f9b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ae0220d-f1", "ovs_interfaceid": "0ae0220d-f10b-4927-b124-35afaa7bc701", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1334.113085] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.673s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1334.118203] env[62522]: INFO nova.compute.manager [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Shelve offloading [ 1334.138861] env[62522]: INFO nova.scheduler.client.report [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Deleted allocations for instance 7406a1a4-a342-475b-ad02-6a29f7c487ee [ 1334.405262] env[62522]: DEBUG oslo_concurrency.lockutils [req-fc06cdba-9783-4efa-a113-581133829ab6 req-785d613b-4d23-41d3-9c1f-195633215a8a service nova] Releasing lock "refresh_cache-ecc70761-8f69-48f6-8e81-7d2ba3728c70" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1334.459494] env[62522]: DEBUG nova.compute.manager [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1334.485651] env[62522]: DEBUG nova.virt.hardware [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1334.486031] env[62522]: DEBUG nova.virt.hardware [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1334.486203] env[62522]: DEBUG nova.virt.hardware [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1334.486388] env[62522]: DEBUG nova.virt.hardware [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1334.486538] env[62522]: DEBUG nova.virt.hardware [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1334.486686] env[62522]: DEBUG nova.virt.hardware [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1334.486907] env[62522]: DEBUG nova.virt.hardware [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1334.487084] env[62522]: DEBUG nova.virt.hardware [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1334.487250] env[62522]: DEBUG nova.virt.hardware [None 
req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1334.487410] env[62522]: DEBUG nova.virt.hardware [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1334.487580] env[62522]: DEBUG nova.virt.hardware [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1334.488462] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6f00b58-56f5-424e-b962-c31ba88c26d7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.496436] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-558e9ba9-ee0a-4a28-8b46-1513467925dc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.621586] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1334.621911] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a24d9057-a6dd-4475-b850-c53267c11ecc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.631237] env[62522]: DEBUG oslo_vmware.api [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1334.631237] env[62522]: value = "task-2416514" [ 1334.631237] env[62522]: _type = "Task" [ 1334.631237] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.638930] env[62522]: DEBUG oslo_vmware.api [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416514, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.645660] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1bc9bf2b-58a7-4302-b831-fa29a589abf5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "7406a1a4-a342-475b-ad02-6a29f7c487ee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.716s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1334.931951] env[62522]: DEBUG nova.network.neutron [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Successfully updated port: 2c6f1904-1976-45eb-9380-5262c08450d2 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1335.141571] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] VM already powered off {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1335.141704] env[62522]: DEBUG nova.compute.manager [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1335.142377] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa583cc1-0402-4154-b77a-400a869d08e1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.147869] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "refresh_cache-da11bae6-484b-455e-9462-6f5143d2a9a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1335.148045] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquired lock "refresh_cache-da11bae6-484b-455e-9462-6f5143d2a9a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1335.148217] env[62522]: DEBUG nova.network.neutron [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1335.217219] env[62522]: DEBUG nova.compute.manager [req-db219d4c-47ce-4ba6-94ed-91b02c5385d2 req-c12c840f-fb6e-48e0-9951-7460e2abb74a service nova] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Received event network-vif-plugged-2c6f1904-1976-45eb-9380-5262c08450d2 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1335.217219] env[62522]: DEBUG oslo_concurrency.lockutils [req-db219d4c-47ce-4ba6-94ed-91b02c5385d2 
req-c12c840f-fb6e-48e0-9951-7460e2abb74a service nova] Acquiring lock "fd9af7c3-358e-417f-97f4-fd2d67d21300-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1335.217410] env[62522]: DEBUG oslo_concurrency.lockutils [req-db219d4c-47ce-4ba6-94ed-91b02c5385d2 req-c12c840f-fb6e-48e0-9951-7460e2abb74a service nova] Lock "fd9af7c3-358e-417f-97f4-fd2d67d21300-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1335.217575] env[62522]: DEBUG oslo_concurrency.lockutils [req-db219d4c-47ce-4ba6-94ed-91b02c5385d2 req-c12c840f-fb6e-48e0-9951-7460e2abb74a service nova] Lock "fd9af7c3-358e-417f-97f4-fd2d67d21300-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1335.217748] env[62522]: DEBUG nova.compute.manager [req-db219d4c-47ce-4ba6-94ed-91b02c5385d2 req-c12c840f-fb6e-48e0-9951-7460e2abb74a service nova] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] No waiting events found dispatching network-vif-plugged-2c6f1904-1976-45eb-9380-5262c08450d2 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1335.217957] env[62522]: WARNING nova.compute.manager [req-db219d4c-47ce-4ba6-94ed-91b02c5385d2 req-c12c840f-fb6e-48e0-9951-7460e2abb74a service nova] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Received unexpected event network-vif-plugged-2c6f1904-1976-45eb-9380-5262c08450d2 for instance with vm_state building and task_state spawning. [ 1335.218091] env[62522]: DEBUG nova.compute.manager [req-db219d4c-47ce-4ba6-94ed-91b02c5385d2 req-c12c840f-fb6e-48e0-9951-7460e2abb74a service nova] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Received event network-changed-2c6f1904-1976-45eb-9380-5262c08450d2 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1335.218248] env[62522]: DEBUG nova.compute.manager [req-db219d4c-47ce-4ba6-94ed-91b02c5385d2 req-c12c840f-fb6e-48e0-9951-7460e2abb74a service nova] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Refreshing instance network info cache due to event network-changed-2c6f1904-1976-45eb-9380-5262c08450d2. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1335.218430] env[62522]: DEBUG oslo_concurrency.lockutils [req-db219d4c-47ce-4ba6-94ed-91b02c5385d2 req-c12c840f-fb6e-48e0-9951-7460e2abb74a service nova] Acquiring lock "refresh_cache-fd9af7c3-358e-417f-97f4-fd2d67d21300" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1335.218569] env[62522]: DEBUG oslo_concurrency.lockutils [req-db219d4c-47ce-4ba6-94ed-91b02c5385d2 req-c12c840f-fb6e-48e0-9951-7460e2abb74a service nova] Acquired lock "refresh_cache-fd9af7c3-358e-417f-97f4-fd2d67d21300" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1335.218731] env[62522]: DEBUG nova.network.neutron [req-db219d4c-47ce-4ba6-94ed-91b02c5385d2 req-c12c840f-fb6e-48e0-9951-7460e2abb74a service nova] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Refreshing network info cache for port 2c6f1904-1976-45eb-9380-5262c08450d2 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1335.435052] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "refresh_cache-fd9af7c3-358e-417f-97f4-fd2d67d21300" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1335.804089] env[62522]: DEBUG nova.network.neutron [req-db219d4c-47ce-4ba6-94ed-91b02c5385d2 req-c12c840f-fb6e-48e0-9951-7460e2abb74a service nova] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1336.014477] env[62522]: DEBUG nova.network.neutron [req-db219d4c-47ce-4ba6-94ed-91b02c5385d2 req-c12c840f-fb6e-48e0-9951-7460e2abb74a service nova] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.095692] env[62522]: DEBUG nova.network.neutron [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Updating instance_info_cache with network_info: [{"id": "d2a62d4f-3bdc-4367-8694-9ba47bdfd799", "address": "fa:16:3e:73:de:e5", "network": {"id": "2a4f6f01-ad28-4f8f-b835-ea86e1791f49", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1577320995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82346c440c3343a0a5c233a48203a13c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2a62d4f-3b", "ovs_interfaceid": "d2a62d4f-3bdc-4367-8694-9ba47bdfd799", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.516895] env[62522]: DEBUG oslo_concurrency.lockutils [req-db219d4c-47ce-4ba6-94ed-91b02c5385d2 req-c12c840f-fb6e-48e0-9951-7460e2abb74a service nova] Releasing lock "refresh_cache-fd9af7c3-358e-417f-97f4-fd2d67d21300" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1336.517268] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired lock "refresh_cache-fd9af7c3-358e-417f-97f4-fd2d67d21300" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.517432] env[62522]: DEBUG nova.network.neutron [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1336.598426] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Releasing lock "refresh_cache-da11bae6-484b-455e-9462-6f5143d2a9a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1336.760750] env[62522]: DEBUG oslo_concurrency.lockutils [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "c95f697b-0d68-489d-bfc4-9d129eab1be2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1336.760991] env[62522]: DEBUG oslo_concurrency.lockutils [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "c95f697b-0d68-489d-bfc4-9d129eab1be2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.082197] env[62522]: DEBUG nova.network.neutron [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1337.265573] env[62522]: DEBUG nova.compute.manager [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1337.276733] env[62522]: DEBUG nova.compute.manager [req-49d7fd4b-d485-46ab-8cd1-6f66efb4a000 req-20a527fe-f32f-4b4c-b126-5f45941721e3 service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Received event network-vif-unplugged-d2a62d4f-3bdc-4367-8694-9ba47bdfd799 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1337.276950] env[62522]: DEBUG oslo_concurrency.lockutils [req-49d7fd4b-d485-46ab-8cd1-6f66efb4a000 req-20a527fe-f32f-4b4c-b126-5f45941721e3 service nova] Acquiring lock "da11bae6-484b-455e-9462-6f5143d2a9a9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.277518] env[62522]: DEBUG oslo_concurrency.lockutils [req-49d7fd4b-d485-46ab-8cd1-6f66efb4a000 req-20a527fe-f32f-4b4c-b126-5f45941721e3 service nova] Lock "da11bae6-484b-455e-9462-6f5143d2a9a9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.277736] env[62522]: DEBUG oslo_concurrency.lockutils [req-49d7fd4b-d485-46ab-8cd1-6f66efb4a000 req-20a527fe-f32f-4b4c-b126-5f45941721e3 service nova] Lock "da11bae6-484b-455e-9462-6f5143d2a9a9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1337.277899] env[62522]: DEBUG nova.compute.manager [req-49d7fd4b-d485-46ab-8cd1-6f66efb4a000 req-20a527fe-f32f-4b4c-b126-5f45941721e3 service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] No waiting events found dispatching network-vif-unplugged-d2a62d4f-3bdc-4367-8694-9ba47bdfd799 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1337.278145] env[62522]: WARNING nova.compute.manager [req-49d7fd4b-d485-46ab-8cd1-6f66efb4a000 req-20a527fe-f32f-4b4c-b126-5f45941721e3 service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Received unexpected event network-vif-unplugged-d2a62d4f-3bdc-4367-8694-9ba47bdfd799 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1337.325451] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1337.326371] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde11917-c842-479a-a899-b2a44c794622 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.334130] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1337.334358] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-02c8ab66-6142-4f10-adba-300c93bb22e3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.352897] env[62522]: DEBUG nova.network.neutron [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Updating instance_info_cache with network_info: [{"id": "2c6f1904-1976-45eb-9380-5262c08450d2", "address": "fa:16:3e:b5:10:37", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c6f1904-19", "ovs_interfaceid": "2c6f1904-1976-45eb-9380-5262c08450d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1337.619237] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1337.619650] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1337.619650] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Deleting the datastore file [datastore1] da11bae6-484b-455e-9462-6f5143d2a9a9 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1337.619892] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-55f11404-369a-4def-991b-5bc1839eb39d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.627346] env[62522]: DEBUG oslo_vmware.api [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1337.627346] env[62522]: value = "task-2416516" [ 1337.627346] env[62522]: _type = "Task" [ 1337.627346] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.635325] env[62522]: DEBUG oslo_vmware.api [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416516, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.790647] env[62522]: DEBUG oslo_concurrency.lockutils [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.790930] env[62522]: DEBUG oslo_concurrency.lockutils [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.792411] env[62522]: INFO nova.compute.claims [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1337.855295] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Releasing lock "refresh_cache-fd9af7c3-358e-417f-97f4-fd2d67d21300" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1337.855597] env[62522]: DEBUG nova.compute.manager [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Instance network_info: |[{"id": "2c6f1904-1976-45eb-9380-5262c08450d2", "address": "fa:16:3e:b5:10:37", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", 
"subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c6f1904-19", "ovs_interfaceid": "2c6f1904-1976-45eb-9380-5262c08450d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1337.856139] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:10:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f625f389-b7cf-49b9-998a-87f3a9e3f234', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2c6f1904-1976-45eb-9380-5262c08450d2', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1337.863550] env[62522]: DEBUG oslo.service.loopingcall [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1337.863756] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1337.863976] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c00935ad-b6ef-4092-81bf-33411297a9e4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.883863] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1337.883863] env[62522]: value = "task-2416517" [ 1337.883863] env[62522]: _type = "Task" [ 1337.883863] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.891309] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416517, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.138458] env[62522]: DEBUG oslo_vmware.api [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416516, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140548} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.138692] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1338.138904] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1338.139114] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1338.161242] env[62522]: INFO nova.scheduler.client.report [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Deleted allocations for instance da11bae6-484b-455e-9462-6f5143d2a9a9 [ 1338.394588] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416517, 'name': CreateVM_Task, 'duration_secs': 0.284772} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.394778] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1338.395494] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1338.395667] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1338.396018] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1338.396277] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bf3a24e-812f-446f-a8eb-3f5bf3c878fb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.400594] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 
tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1338.400594] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5271f33e-1d7d-c9d1-2056-581fd8624e79" [ 1338.400594] env[62522]: _type = "Task" [ 1338.400594] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.407911] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5271f33e-1d7d-c9d1-2056-581fd8624e79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.666710] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1338.868520] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a56d607-28b2-46cf-9081-a5da86788a16 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.876310] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca7b599-e67a-4dee-ba7c-4276816c98dc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.908174] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7f16cee-f766-40c3-96fd-d38eb2af8c0c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.918868] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c997ba38-aa1f-40e1-b9cd-3bb1bae5ffa0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.922346] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5271f33e-1d7d-c9d1-2056-581fd8624e79, 'name': SearchDatastore_Task, 'duration_secs': 0.012582} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.922638] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1338.922859] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1338.923101] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1338.923253] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1338.923435] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1338.923949] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-afe2525f-a16a-408f-a9aa-216171545eb6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.934064] env[62522]: DEBUG nova.compute.provider_tree [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1338.948467] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1338.948651] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1338.949381] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45f5b542-4270-4cd7-8c20-95b805d4ea30 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.955782] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1338.955782] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52488f83-6aac-b444-f463-89546ae8c7a3" [ 1338.955782] env[62522]: _type = "Task" [ 1338.955782] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.963023] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52488f83-6aac-b444-f463-89546ae8c7a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.307026] env[62522]: DEBUG nova.compute.manager [req-1ab9f4a7-dd12-4b86-ae7e-7918da6d725b req-e5c69ad6-b825-4e12-9835-f3193d32241e service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Received event network-changed-d2a62d4f-3bdc-4367-8694-9ba47bdfd799 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1339.307311] env[62522]: DEBUG nova.compute.manager [req-1ab9f4a7-dd12-4b86-ae7e-7918da6d725b req-e5c69ad6-b825-4e12-9835-f3193d32241e service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Refreshing instance network info cache due to event network-changed-d2a62d4f-3bdc-4367-8694-9ba47bdfd799. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1339.307566] env[62522]: DEBUG oslo_concurrency.lockutils [req-1ab9f4a7-dd12-4b86-ae7e-7918da6d725b req-e5c69ad6-b825-4e12-9835-f3193d32241e service nova] Acquiring lock "refresh_cache-da11bae6-484b-455e-9462-6f5143d2a9a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1339.307736] env[62522]: DEBUG oslo_concurrency.lockutils [req-1ab9f4a7-dd12-4b86-ae7e-7918da6d725b req-e5c69ad6-b825-4e12-9835-f3193d32241e service nova] Acquired lock "refresh_cache-da11bae6-484b-455e-9462-6f5143d2a9a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1339.307959] env[62522]: DEBUG nova.network.neutron [req-1ab9f4a7-dd12-4b86-ae7e-7918da6d725b req-e5c69ad6-b825-4e12-9835-f3193d32241e service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Refreshing network info cache for port d2a62d4f-3bdc-4367-8694-9ba47bdfd799 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1339.437241] env[62522]: DEBUG nova.scheduler.client.report [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1339.467072] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52488f83-6aac-b444-f463-89546ae8c7a3, 'name': SearchDatastore_Task, 'duration_secs': 0.009395} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.467526] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3a63a0b-2ae0-4acb-91de-bf6a12545cae {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.472761] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1339.472761] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5249218c-7e27-7a4d-fbc4-92768b34b0be" [ 1339.472761] env[62522]: _type = "Task" [ 1339.472761] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.481121] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5249218c-7e27-7a4d-fbc4-92768b34b0be, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.943985] env[62522]: DEBUG oslo_concurrency.lockutils [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.153s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1339.944552] env[62522]: DEBUG nova.compute.manager [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1339.947566] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.281s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1339.947794] env[62522]: DEBUG nova.objects.instance [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lazy-loading 'resources' on Instance uuid da11bae6-484b-455e-9462-6f5143d2a9a9 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1339.983058] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5249218c-7e27-7a4d-fbc4-92768b34b0be, 'name': SearchDatastore_Task, 'duration_secs': 0.009804} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.983966] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1339.984247] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] fd9af7c3-358e-417f-97f4-fd2d67d21300/fd9af7c3-358e-417f-97f4-fd2d67d21300.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1339.984502] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f0d9c1d-6739-4a6b-83d1-15c454b3388d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.993213] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1339.993213] env[62522]: value = "task-2416518" [ 1339.993213] env[62522]: _type = "Task" [ 1339.993213] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.000975] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416518, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.024223] env[62522]: DEBUG nova.network.neutron [req-1ab9f4a7-dd12-4b86-ae7e-7918da6d725b req-e5c69ad6-b825-4e12-9835-f3193d32241e service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Updated VIF entry in instance network info cache for port d2a62d4f-3bdc-4367-8694-9ba47bdfd799. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1340.024606] env[62522]: DEBUG nova.network.neutron [req-1ab9f4a7-dd12-4b86-ae7e-7918da6d725b req-e5c69ad6-b825-4e12-9835-f3193d32241e service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Updating instance_info_cache with network_info: [{"id": "d2a62d4f-3bdc-4367-8694-9ba47bdfd799", "address": "fa:16:3e:73:de:e5", "network": {"id": "2a4f6f01-ad28-4f8f-b835-ea86e1791f49", "bridge": null, "label": "tempest-ServerActionsTestOtherB-1577320995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82346c440c3343a0a5c233a48203a13c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapd2a62d4f-3b", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1340.450115] env[62522]: DEBUG nova.compute.utils [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1340.451664] env[62522]: DEBUG nova.objects.instance [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lazy-loading 'numa_topology' on Instance uuid da11bae6-484b-455e-9462-6f5143d2a9a9 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1340.453528] env[62522]: DEBUG nova.compute.manager [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1340.453733] env[62522]: DEBUG nova.network.neutron [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1340.491289] env[62522]: DEBUG nova.policy [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3a4ba3a3d3a34495b7a7e0618577d60f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '071dd4c295a54e388099d5bf0f4e300b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1340.502919] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416518, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.425609} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.503175] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] fd9af7c3-358e-417f-97f4-fd2d67d21300/fd9af7c3-358e-417f-97f4-fd2d67d21300.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1340.503379] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1340.503622] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-802ea807-0177-4622-bf01-fdc39c0789a4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.510694] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1340.510694] env[62522]: value = "task-2416519" [ 1340.510694] env[62522]: _type = "Task" [ 1340.510694] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.518742] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416519, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.527421] env[62522]: DEBUG oslo_concurrency.lockutils [req-1ab9f4a7-dd12-4b86-ae7e-7918da6d725b req-e5c69ad6-b825-4e12-9835-f3193d32241e service nova] Releasing lock "refresh_cache-da11bae6-484b-455e-9462-6f5143d2a9a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1340.815832] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "da11bae6-484b-455e-9462-6f5143d2a9a9" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1340.900534] env[62522]: DEBUG nova.network.neutron [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Successfully created port: 74c8cccc-0aa8-4147-9172-cbb2cbfcb35f {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1340.954809] env[62522]: DEBUG nova.objects.base [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62522) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1340.959229] env[62522]: DEBUG nova.compute.manager [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1341.023506] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416519, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070336} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.026695] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1341.028205] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfdde3cb-9cda-4d04-acbe-19d1464b3842 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.051547] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] fd9af7c3-358e-417f-97f4-fd2d67d21300/fd9af7c3-358e-417f-97f4-fd2d67d21300.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1341.054567] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf12b46b-ef11-4f1c-a891-877545aa7fb7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.076439] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1341.076439] env[62522]: value = "task-2416520" [ 1341.076439] env[62522]: _type = "Task" [ 1341.076439] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.088201] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416520, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.095022] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c41aa7ba-606a-48a0-b0b0-d5a42cd57060 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.101113] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-758b30b6-7311-4c44-82df-65db368fc62c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.139821] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0726ae0c-482c-4a90-a275-c94ef0e31b90 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.148927] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce02664b-9dd7-4227-936e-d9a17b653801 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.165322] env[62522]: DEBUG nova.compute.provider_tree [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1341.587601] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416520, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.668446] env[62522]: DEBUG nova.scheduler.client.report [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1341.971564] env[62522]: DEBUG nova.compute.manager [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1341.998500] env[62522]: DEBUG nova.virt.hardware [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1341.998872] env[62522]: DEBUG nova.virt.hardware [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1341.999098] env[62522]: DEBUG nova.virt.hardware [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1341.999305] env[62522]: DEBUG nova.virt.hardware [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1341.999472] env[62522]: DEBUG nova.virt.hardware [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1341.999604] env[62522]: DEBUG nova.virt.hardware [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1341.999816] env[62522]: DEBUG nova.virt.hardware [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1341.999980] env[62522]: DEBUG nova.virt.hardware [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1342.000168] env[62522]: DEBUG nova.virt.hardware [None 
req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1342.000486] env[62522]: DEBUG nova.virt.hardware [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1342.000486] env[62522]: DEBUG nova.virt.hardware [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1342.001380] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7600036a-a0f7-437c-b079-1378b1e915e0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.009709] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b45e3d-7d4e-45c8-bcf1-2c8d002cd897 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.086941] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416520, 'name': ReconfigVM_Task, 'duration_secs': 0.830997} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.087268] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Reconfigured VM instance instance-00000073 to attach disk [datastore2] fd9af7c3-358e-417f-97f4-fd2d67d21300/fd9af7c3-358e-417f-97f4-fd2d67d21300.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1342.087922] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-140d20cd-d5c6-4149-bef7-c1e41f5d302a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.095036] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1342.095036] env[62522]: value = "task-2416521" [ 1342.095036] env[62522]: _type = "Task" [ 1342.095036] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.102788] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416521, 'name': Rename_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.174041] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.226s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1342.305683] env[62522]: DEBUG nova.compute.manager [req-a1da9870-9925-4568-8854-4e9d6e8a88b4 req-b63eeaf5-8aef-4a1b-bc59-462bae141844 service nova] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Received event network-vif-plugged-74c8cccc-0aa8-4147-9172-cbb2cbfcb35f {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1342.305954] env[62522]: DEBUG oslo_concurrency.lockutils [req-a1da9870-9925-4568-8854-4e9d6e8a88b4 req-b63eeaf5-8aef-4a1b-bc59-462bae141844 service nova] Acquiring lock "c95f697b-0d68-489d-bfc4-9d129eab1be2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1342.306212] env[62522]: DEBUG oslo_concurrency.lockutils [req-a1da9870-9925-4568-8854-4e9d6e8a88b4 req-b63eeaf5-8aef-4a1b-bc59-462bae141844 service nova] Lock "c95f697b-0d68-489d-bfc4-9d129eab1be2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1342.306437] env[62522]: DEBUG oslo_concurrency.lockutils [req-a1da9870-9925-4568-8854-4e9d6e8a88b4 req-b63eeaf5-8aef-4a1b-bc59-462bae141844 service nova] Lock "c95f697b-0d68-489d-bfc4-9d129eab1be2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1342.306636] env[62522]: DEBUG nova.compute.manager [req-a1da9870-9925-4568-8854-4e9d6e8a88b4 req-b63eeaf5-8aef-4a1b-bc59-462bae141844 service nova] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] No waiting events found dispatching network-vif-plugged-74c8cccc-0aa8-4147-9172-cbb2cbfcb35f {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1342.306804] env[62522]: WARNING nova.compute.manager [req-a1da9870-9925-4568-8854-4e9d6e8a88b4 req-b63eeaf5-8aef-4a1b-bc59-462bae141844 service nova] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Received unexpected event network-vif-plugged-74c8cccc-0aa8-4147-9172-cbb2cbfcb35f for instance with vm_state building and task_state spawning. [ 1342.395063] env[62522]: DEBUG nova.network.neutron [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Successfully updated port: 74c8cccc-0aa8-4147-9172-cbb2cbfcb35f {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1342.605626] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416521, 'name': Rename_Task, 'duration_secs': 0.134433} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.605626] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1342.606891] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cbda75d8-c108-4c17-bcdb-ed5918ed4d13 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.612707] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1342.612707] env[62522]: value = "task-2416522" [ 1342.612707] env[62522]: _type = "Task" [ 1342.612707] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.621836] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416522, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.683463] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df602586-8a4a-4f9b-b7f3-ff9b12267372 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "da11bae6-484b-455e-9462-6f5143d2a9a9" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 22.306s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1342.684390] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "da11bae6-484b-455e-9462-6f5143d2a9a9" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.869s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1342.684611] env[62522]: INFO nova.compute.manager [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Unshelving [ 1342.898069] env[62522]: DEBUG oslo_concurrency.lockutils [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "refresh_cache-c95f697b-0d68-489d-bfc4-9d129eab1be2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1342.898283] env[62522]: DEBUG oslo_concurrency.lockutils [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired lock "refresh_cache-c95f697b-0d68-489d-bfc4-9d129eab1be2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1342.898356] env[62522]: DEBUG nova.network.neutron [None 
req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1343.123324] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416522, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.430259] env[62522]: DEBUG nova.network.neutron [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1343.552271] env[62522]: DEBUG nova.network.neutron [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Updating instance_info_cache with network_info: [{"id": "74c8cccc-0aa8-4147-9172-cbb2cbfcb35f", "address": "fa:16:3e:88:4a:7a", "network": {"id": "fd993a8b-571c-4873-a5d6-4d8c60e23d38", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2084093882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071dd4c295a54e388099d5bf0f4e300b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74c8cccc-0a", "ovs_interfaceid": "74c8cccc-0aa8-4147-9172-cbb2cbfcb35f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1343.624189] env[62522]: DEBUG oslo_vmware.api [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416522, 'name': PowerOnVM_Task, 'duration_secs': 0.513435} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.624477] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1343.624677] env[62522]: INFO nova.compute.manager [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Took 9.17 seconds to spawn the instance on the hypervisor. [ 1343.624890] env[62522]: DEBUG nova.compute.manager [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1343.625648] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbfd74c1-ec27-418e-abaf-7b6ed68881f4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.709508] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1343.709786] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1343.710094] env[62522]: DEBUG nova.objects.instance [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lazy-loading 'pci_requests' on Instance uuid da11bae6-484b-455e-9462-6f5143d2a9a9 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1344.054765] env[62522]: DEBUG oslo_concurrency.lockutils [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Releasing lock "refresh_cache-c95f697b-0d68-489d-bfc4-9d129eab1be2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1344.055186] env[62522]: DEBUG nova.compute.manager [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Instance network_info: |[{"id": "74c8cccc-0aa8-4147-9172-cbb2cbfcb35f", "address": "fa:16:3e:88:4a:7a", "network": {"id": "fd993a8b-571c-4873-a5d6-4d8c60e23d38", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2084093882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": 
"gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071dd4c295a54e388099d5bf0f4e300b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74c8cccc-0a", "ovs_interfaceid": "74c8cccc-0aa8-4147-9172-cbb2cbfcb35f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1344.055628] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:4a:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd7b5f1ef-d4b9-4ec3-b047-17e4cb349d25', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '74c8cccc-0aa8-4147-9172-cbb2cbfcb35f', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1344.063113] env[62522]: DEBUG oslo.service.loopingcall [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1344.063336] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1344.063562] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a97066a4-562b-498e-9d80-abe690a3b94e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.082840] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1344.082840] env[62522]: value = "task-2416523" [ 1344.082840] env[62522]: _type = "Task" [ 1344.082840] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.090405] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416523, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.142975] env[62522]: INFO nova.compute.manager [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Took 13.92 seconds to build instance. 
[ 1344.214665] env[62522]: DEBUG nova.objects.instance [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lazy-loading 'numa_topology' on Instance uuid da11bae6-484b-455e-9462-6f5143d2a9a9 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1344.332285] env[62522]: DEBUG nova.compute.manager [req-c440fbe4-fd86-4003-a35b-e6068ee546a8 req-d9f003ec-84c6-4cf0-aa80-73dfb7e2c632 service nova] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Received event network-changed-74c8cccc-0aa8-4147-9172-cbb2cbfcb35f {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1344.332490] env[62522]: DEBUG nova.compute.manager [req-c440fbe4-fd86-4003-a35b-e6068ee546a8 req-d9f003ec-84c6-4cf0-aa80-73dfb7e2c632 service nova] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Refreshing instance network info cache due to event network-changed-74c8cccc-0aa8-4147-9172-cbb2cbfcb35f. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1344.332701] env[62522]: DEBUG oslo_concurrency.lockutils [req-c440fbe4-fd86-4003-a35b-e6068ee546a8 req-d9f003ec-84c6-4cf0-aa80-73dfb7e2c632 service nova] Acquiring lock "refresh_cache-c95f697b-0d68-489d-bfc4-9d129eab1be2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1344.332845] env[62522]: DEBUG oslo_concurrency.lockutils [req-c440fbe4-fd86-4003-a35b-e6068ee546a8 req-d9f003ec-84c6-4cf0-aa80-73dfb7e2c632 service nova] Acquired lock "refresh_cache-c95f697b-0d68-489d-bfc4-9d129eab1be2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.333018] env[62522]: DEBUG nova.network.neutron [req-c440fbe4-fd86-4003-a35b-e6068ee546a8 req-d9f003ec-84c6-4cf0-aa80-73dfb7e2c632 service nova] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Refreshing network info cache for port 74c8cccc-0aa8-4147-9172-cbb2cbfcb35f {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1344.597031] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416523, 'name': CreateVM_Task, 'duration_secs': 0.328648} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.597031] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1344.597031] env[62522]: DEBUG oslo_concurrency.lockutils [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1344.597031] env[62522]: DEBUG oslo_concurrency.lockutils [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.597031] env[62522]: DEBUG oslo_concurrency.lockutils [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1344.597031] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-387424fa-58dc-4acc-9265-b95227f984c1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.600985] env[62522]: DEBUG oslo_vmware.api [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1344.600985] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525b2bdb-0a5d-24ad-9379-ea94e61a4cb9" [ 1344.600985] env[62522]: _type = "Task" [ 1344.600985] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.608857] env[62522]: DEBUG oslo_vmware.api [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525b2bdb-0a5d-24ad-9379-ea94e61a4cb9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.645895] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b26732fe-c23f-43d2-bd97-c3192ce0dfd7 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "fd9af7c3-358e-417f-97f4-fd2d67d21300" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.428s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1344.717613] env[62522]: INFO nova.compute.claims [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1345.030088] env[62522]: DEBUG nova.network.neutron [req-c440fbe4-fd86-4003-a35b-e6068ee546a8 req-d9f003ec-84c6-4cf0-aa80-73dfb7e2c632 service nova] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Updated VIF entry in instance network info cache for port 74c8cccc-0aa8-4147-9172-cbb2cbfcb35f. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1345.030448] env[62522]: DEBUG nova.network.neutron [req-c440fbe4-fd86-4003-a35b-e6068ee546a8 req-d9f003ec-84c6-4cf0-aa80-73dfb7e2c632 service nova] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Updating instance_info_cache with network_info: [{"id": "74c8cccc-0aa8-4147-9172-cbb2cbfcb35f", "address": "fa:16:3e:88:4a:7a", "network": {"id": "fd993a8b-571c-4873-a5d6-4d8c60e23d38", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2084093882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071dd4c295a54e388099d5bf0f4e300b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74c8cccc-0a", "ovs_interfaceid": "74c8cccc-0aa8-4147-9172-cbb2cbfcb35f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.112060] env[62522]: DEBUG oslo_vmware.api [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525b2bdb-0a5d-24ad-9379-ea94e61a4cb9, 'name': SearchDatastore_Task, 'duration_secs': 0.009136} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.112370] env[62522]: DEBUG oslo_concurrency.lockutils [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.112609] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1345.112869] env[62522]: DEBUG oslo_concurrency.lockutils [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1345.113087] env[62522]: DEBUG oslo_concurrency.lockutils [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.113285] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1345.113541] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3ecfb3b8-9eb0-433b-9d93-9d102efa7099 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.121328] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1345.121496] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1345.122282] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d832986b-a892-48df-b17d-c865582ef628 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.127459] env[62522]: DEBUG oslo_vmware.api [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1345.127459] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527f2635-270d-cc55-40f8-b6fb489ff70d" [ 1345.127459] env[62522]: _type = "Task" [ 1345.127459] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.134217] env[62522]: DEBUG oslo_vmware.api [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527f2635-270d-cc55-40f8-b6fb489ff70d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.533443] env[62522]: DEBUG oslo_concurrency.lockutils [req-c440fbe4-fd86-4003-a35b-e6068ee546a8 req-d9f003ec-84c6-4cf0-aa80-73dfb7e2c632 service nova] Releasing lock "refresh_cache-c95f697b-0d68-489d-bfc4-9d129eab1be2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.638998] env[62522]: DEBUG oslo_vmware.api [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]527f2635-270d-cc55-40f8-b6fb489ff70d, 'name': SearchDatastore_Task, 'duration_secs': 0.00752} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.639843] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64c78128-9967-4d40-a9dc-877c2bea434e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.645508] env[62522]: DEBUG oslo_vmware.api [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1345.645508] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52996a54-2e53-fa0f-f900-e3a0ac0e6aba" [ 1345.645508] env[62522]: _type = "Task" [ 1345.645508] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.653122] env[62522]: DEBUG oslo_vmware.api [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52996a54-2e53-fa0f-f900-e3a0ac0e6aba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.820010] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e289d145-2494-45a7-8ffd-2b4370b2dab7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.828193] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d40f082-48ba-4eae-b515-7c2488170250 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.860771] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f458f500-9692-4a9f-9da1-4b78e53d5d59 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.868128] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a52bf511-e23e-48e8-b52b-3d4952fa8f2b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.881579] env[62522]: DEBUG nova.compute.provider_tree [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1346.156465] env[62522]: DEBUG oslo_vmware.api [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52996a54-2e53-fa0f-f900-e3a0ac0e6aba, 'name': SearchDatastore_Task, 'duration_secs': 0.008626} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.156763] env[62522]: DEBUG oslo_concurrency.lockutils [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1346.157022] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] c95f697b-0d68-489d-bfc4-9d129eab1be2/c95f697b-0d68-489d-bfc4-9d129eab1be2.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1346.157334] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-13c08164-b3c0-4da4-9edb-849dc23d27df {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.163726] env[62522]: DEBUG oslo_vmware.api [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1346.163726] env[62522]: value = "task-2416524" [ 1346.163726] env[62522]: _type = "Task" [ 1346.163726] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.171220] env[62522]: DEBUG oslo_vmware.api [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416524, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.366108] env[62522]: DEBUG nova.compute.manager [req-0b9162a4-3096-44a7-95e2-e53f850987be req-c0c52baa-4847-48aa-aadf-0a47c456b511 service nova] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Received event network-changed-2c6f1904-1976-45eb-9380-5262c08450d2 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1346.366368] env[62522]: DEBUG nova.compute.manager [req-0b9162a4-3096-44a7-95e2-e53f850987be req-c0c52baa-4847-48aa-aadf-0a47c456b511 service nova] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Refreshing instance network info cache due to event network-changed-2c6f1904-1976-45eb-9380-5262c08450d2. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1346.366593] env[62522]: DEBUG oslo_concurrency.lockutils [req-0b9162a4-3096-44a7-95e2-e53f850987be req-c0c52baa-4847-48aa-aadf-0a47c456b511 service nova] Acquiring lock "refresh_cache-fd9af7c3-358e-417f-97f4-fd2d67d21300" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1346.366738] env[62522]: DEBUG oslo_concurrency.lockutils [req-0b9162a4-3096-44a7-95e2-e53f850987be req-c0c52baa-4847-48aa-aadf-0a47c456b511 service nova] Acquired lock "refresh_cache-fd9af7c3-358e-417f-97f4-fd2d67d21300" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.366982] env[62522]: DEBUG nova.network.neutron [req-0b9162a4-3096-44a7-95e2-e53f850987be req-c0c52baa-4847-48aa-aadf-0a47c456b511 service nova] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Refreshing network info cache for port 2c6f1904-1976-45eb-9380-5262c08450d2 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1346.384576] env[62522]: DEBUG nova.scheduler.client.report [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1346.673381] env[62522]: DEBUG oslo_vmware.api [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416524, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470132} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.673712] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] c95f697b-0d68-489d-bfc4-9d129eab1be2/c95f697b-0d68-489d-bfc4-9d129eab1be2.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1346.673849] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1346.674178] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d70a4fe9-96c4-4726-86ff-7abe3b8cabc2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.680485] env[62522]: DEBUG oslo_vmware.api [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1346.680485] env[62522]: value = "task-2416525" [ 1346.680485] env[62522]: _type = "Task" [ 1346.680485] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.687783] env[62522]: DEBUG oslo_vmware.api [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416525, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.889057] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.179s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1346.933069] env[62522]: INFO nova.network.neutron [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Updating port d2a62d4f-3bdc-4367-8694-9ba47bdfd799 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1347.190403] env[62522]: DEBUG oslo_vmware.api [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416525, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072083} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.192620] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1347.193392] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b17d2ea3-a4ee-4a86-87e1-8c65b5d9b3af {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.214740] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] c95f697b-0d68-489d-bfc4-9d129eab1be2/c95f697b-0d68-489d-bfc4-9d129eab1be2.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1347.215057] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c164f23d-b2c6-45b5-adb9-89e5c1a02762 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.235895] env[62522]: DEBUG oslo_vmware.api [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1347.235895] env[62522]: value = "task-2416526" [ 1347.235895] env[62522]: _type = "Task" [ 1347.235895] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.245447] env[62522]: DEBUG oslo_vmware.api [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416526, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.248073] env[62522]: DEBUG nova.network.neutron [req-0b9162a4-3096-44a7-95e2-e53f850987be req-c0c52baa-4847-48aa-aadf-0a47c456b511 service nova] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Updated VIF entry in instance network info cache for port 2c6f1904-1976-45eb-9380-5262c08450d2. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1347.248305] env[62522]: DEBUG nova.network.neutron [req-0b9162a4-3096-44a7-95e2-e53f850987be req-c0c52baa-4847-48aa-aadf-0a47c456b511 service nova] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Updating instance_info_cache with network_info: [{"id": "2c6f1904-1976-45eb-9380-5262c08450d2", "address": "fa:16:3e:b5:10:37", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.218", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c6f1904-19", "ovs_interfaceid": "2c6f1904-1976-45eb-9380-5262c08450d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1347.747079] env[62522]: DEBUG oslo_vmware.api [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416526, 'name': ReconfigVM_Task, 'duration_secs': 0.26804} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.747506] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Reconfigured VM instance instance-00000074 to attach disk [datastore1] c95f697b-0d68-489d-bfc4-9d129eab1be2/c95f697b-0d68-489d-bfc4-9d129eab1be2.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1347.748381] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-348eb8f0-1ee9-4a65-9466-3f83bfbad713 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.750506] env[62522]: DEBUG oslo_concurrency.lockutils [req-0b9162a4-3096-44a7-95e2-e53f850987be req-c0c52baa-4847-48aa-aadf-0a47c456b511 service nova] Releasing lock "refresh_cache-fd9af7c3-358e-417f-97f4-fd2d67d21300" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1347.756097] env[62522]: DEBUG oslo_vmware.api [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1347.756097] env[62522]: value = "task-2416527" [ 1347.756097] env[62522]: _type = "Task" [ 1347.756097] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.763365] env[62522]: DEBUG oslo_vmware.api [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416527, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.247552] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1348.265866] env[62522]: DEBUG oslo_vmware.api [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416527, 'name': Rename_Task, 'duration_secs': 0.224065} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.266173] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1348.266413] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fff1ff89-ed57-4105-993c-7ca88349c564 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.272688] env[62522]: DEBUG oslo_vmware.api [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1348.272688] env[62522]: value = "task-2416528" [ 1348.272688] env[62522]: _type = "Task" [ 1348.272688] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.279778] env[62522]: DEBUG oslo_vmware.api [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416528, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.474316] env[62522]: DEBUG nova.compute.manager [req-379d3260-db06-41a7-8b69-68707bd731ea req-b0002269-d1bc-4187-ac53-89415643ca49 service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Received event network-vif-plugged-d2a62d4f-3bdc-4367-8694-9ba47bdfd799 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1348.474530] env[62522]: DEBUG oslo_concurrency.lockutils [req-379d3260-db06-41a7-8b69-68707bd731ea req-b0002269-d1bc-4187-ac53-89415643ca49 service nova] Acquiring lock "da11bae6-484b-455e-9462-6f5143d2a9a9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1348.474739] env[62522]: DEBUG oslo_concurrency.lockutils [req-379d3260-db06-41a7-8b69-68707bd731ea req-b0002269-d1bc-4187-ac53-89415643ca49 service nova] Lock "da11bae6-484b-455e-9462-6f5143d2a9a9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.474945] env[62522]: DEBUG oslo_concurrency.lockutils [req-379d3260-db06-41a7-8b69-68707bd731ea req-b0002269-d1bc-4187-ac53-89415643ca49 service nova] Lock "da11bae6-484b-455e-9462-6f5143d2a9a9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.475133] env[62522]: DEBUG nova.compute.manager [req-379d3260-db06-41a7-8b69-68707bd731ea req-b0002269-d1bc-4187-ac53-89415643ca49 service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] No waiting events found dispatching network-vif-plugged-d2a62d4f-3bdc-4367-8694-9ba47bdfd799 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1348.475301] env[62522]: WARNING 
nova.compute.manager [req-379d3260-db06-41a7-8b69-68707bd731ea req-b0002269-d1bc-4187-ac53-89415643ca49 service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Received unexpected event network-vif-plugged-d2a62d4f-3bdc-4367-8694-9ba47bdfd799 for instance with vm_state shelved_offloaded and task_state spawning. [ 1348.567879] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "refresh_cache-da11bae6-484b-455e-9462-6f5143d2a9a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1348.568022] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquired lock "refresh_cache-da11bae6-484b-455e-9462-6f5143d2a9a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1348.568251] env[62522]: DEBUG nova.network.neutron [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1348.782175] env[62522]: DEBUG oslo_vmware.api [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416528, 'name': PowerOnVM_Task, 'duration_secs': 0.487027} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.782591] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1348.782716] env[62522]: INFO nova.compute.manager [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Took 6.81 seconds to spawn the instance on the hypervisor. 
[ 1348.782907] env[62522]: DEBUG nova.compute.manager [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1348.783666] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e5a4bd-647e-4747-a2f0-8c3c700b69e6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.267849] env[62522]: DEBUG nova.network.neutron [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Updating instance_info_cache with network_info: [{"id": "d2a62d4f-3bdc-4367-8694-9ba47bdfd799", "address": "fa:16:3e:73:de:e5", "network": {"id": "2a4f6f01-ad28-4f8f-b835-ea86e1791f49", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1577320995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82346c440c3343a0a5c233a48203a13c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2a62d4f-3b", "ovs_interfaceid": "d2a62d4f-3bdc-4367-8694-9ba47bdfd799", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1349.300778] env[62522]: INFO nova.compute.manager [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Took 11.53 seconds to build instance. 
[ 1349.771019] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Releasing lock "refresh_cache-da11bae6-484b-455e-9462-6f5143d2a9a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1349.797423] env[62522]: DEBUG nova.virt.hardware [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='0dee9db7659e9bd44b4d33e6c3af18fd',container_format='bare',created_at=2025-02-10T12:30:31Z,direct_url=,disk_format='vmdk',id=414dd827-3fec-484e-897f-aa53cdec2e35,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-1490896788-shelved',owner='82346c440c3343a0a5c233a48203a13c',properties=ImageMetaProps,protected=,size=31663616,status='active',tags=,updated_at=2025-02-10T12:30:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1349.797736] env[62522]: DEBUG nova.virt.hardware [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1349.797839] env[62522]: DEBUG nova.virt.hardware [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1349.798043] env[62522]: DEBUG nova.virt.hardware [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1349.798232] env[62522]: DEBUG nova.virt.hardware [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1349.798388] env[62522]: DEBUG nova.virt.hardware [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1349.798602] env[62522]: DEBUG nova.virt.hardware [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1349.798763] env[62522]: DEBUG nova.virt.hardware [None 
req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1349.798933] env[62522]: DEBUG nova.virt.hardware [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1349.799108] env[62522]: DEBUG nova.virt.hardware [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1349.799286] env[62522]: DEBUG nova.virt.hardware [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1349.800145] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32581cef-2ad5-4772-9e72-521b5509f0b0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.803034] env[62522]: DEBUG oslo_concurrency.lockutils [None req-27c84fad-53ad-465f-8729-edb36190aba5 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "c95f697b-0d68-489d-bfc4-9d129eab1be2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.042s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1349.808819] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bcfded2-33ec-4cb6-84eb-b0fb5e56ef6c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.822420] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:73:de:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '193994c7-8e1b-4f25-a4a4-d0563845eb28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd2a62d4f-3bdc-4367-8694-9ba47bdfd799', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1349.829711] env[62522]: DEBUG oslo.service.loopingcall [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1349.829979] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1349.830513] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8bee1619-106b-4e94-aec6-76acedf40bfd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.849730] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1349.849730] env[62522]: value = "task-2416529" [ 1349.849730] env[62522]: _type = "Task" [ 1349.849730] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.856884] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416529, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.361582] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416529, 'name': CreateVM_Task, 'duration_secs': 0.304137} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.361754] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1350.362442] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/414dd827-3fec-484e-897f-aa53cdec2e35" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1350.362608] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquired lock "[datastore1] devstack-image-cache_base/414dd827-3fec-484e-897f-aa53cdec2e35" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.363013] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/414dd827-3fec-484e-897f-aa53cdec2e35" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1350.363287] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2521a1a0-dc2f-46a7-b437-53c0c0fbba05 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.367754] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1350.367754] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524f3fc2-e630-1a2c-0073-df13049d5f57" [ 1350.367754] env[62522]: _type = "Task" [ 1350.367754] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.375087] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]524f3fc2-e630-1a2c-0073-df13049d5f57, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.513424] env[62522]: DEBUG nova.compute.manager [req-ce563947-658f-43cf-846f-bebe40336b5e req-bba12dfe-0acf-411d-8598-56dd652a8591 service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Received event network-changed-d2a62d4f-3bdc-4367-8694-9ba47bdfd799 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1350.513629] env[62522]: DEBUG nova.compute.manager [req-ce563947-658f-43cf-846f-bebe40336b5e req-bba12dfe-0acf-411d-8598-56dd652a8591 service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Refreshing instance network info cache due to event network-changed-d2a62d4f-3bdc-4367-8694-9ba47bdfd799. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1350.513866] env[62522]: DEBUG oslo_concurrency.lockutils [req-ce563947-658f-43cf-846f-bebe40336b5e req-bba12dfe-0acf-411d-8598-56dd652a8591 service nova] Acquiring lock "refresh_cache-da11bae6-484b-455e-9462-6f5143d2a9a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1350.514029] env[62522]: DEBUG oslo_concurrency.lockutils [req-ce563947-658f-43cf-846f-bebe40336b5e req-bba12dfe-0acf-411d-8598-56dd652a8591 service nova] Acquired lock "refresh_cache-da11bae6-484b-455e-9462-6f5143d2a9a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.514204] env[62522]: DEBUG nova.network.neutron [req-ce563947-658f-43cf-846f-bebe40336b5e req-bba12dfe-0acf-411d-8598-56dd652a8591 service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Refreshing network info cache for port d2a62d4f-3bdc-4367-8694-9ba47bdfd799 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1350.877878] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Releasing lock "[datastore1] devstack-image-cache_base/414dd827-3fec-484e-897f-aa53cdec2e35" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1350.878192] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Processing image 414dd827-3fec-484e-897f-aa53cdec2e35 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1350.878378] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/414dd827-3fec-484e-897f-aa53cdec2e35/414dd827-3fec-484e-897f-aa53cdec2e35.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} 
[ 1350.878529] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquired lock "[datastore1] devstack-image-cache_base/414dd827-3fec-484e-897f-aa53cdec2e35/414dd827-3fec-484e-897f-aa53cdec2e35.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.879078] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1350.879078] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9c005c2e-2b78-456b-acd2-ef8880c19e32 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.895371] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1350.895558] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1350.896324] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e6ba4b6-204c-47cf-9a95-d729ba486b45 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.902054] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1350.902054] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f06caa-01f8-ae3b-a8f4-cc1f29ef5f1c" [ 1350.902054] env[62522]: _type = "Task" [ 1350.902054] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.909135] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f06caa-01f8-ae3b-a8f4-cc1f29ef5f1c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.227842] env[62522]: DEBUG nova.network.neutron [req-ce563947-658f-43cf-846f-bebe40336b5e req-bba12dfe-0acf-411d-8598-56dd652a8591 service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Updated VIF entry in instance network info cache for port d2a62d4f-3bdc-4367-8694-9ba47bdfd799. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1351.228281] env[62522]: DEBUG nova.network.neutron [req-ce563947-658f-43cf-846f-bebe40336b5e req-bba12dfe-0acf-411d-8598-56dd652a8591 service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Updating instance_info_cache with network_info: [{"id": "d2a62d4f-3bdc-4367-8694-9ba47bdfd799", "address": "fa:16:3e:73:de:e5", "network": {"id": "2a4f6f01-ad28-4f8f-b835-ea86e1791f49", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1577320995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82346c440c3343a0a5c233a48203a13c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2a62d4f-3b", "ovs_interfaceid": "d2a62d4f-3bdc-4367-8694-9ba47bdfd799", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1351.412205] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Preparing fetch location {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1351.412457] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Fetch image to [datastore1] OSTACK_IMG_e28e0743-fcd9-42b0-b97d-406c39a5d183/OSTACK_IMG_e28e0743-fcd9-42b0-b97d-406c39a5d183.vmdk {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1351.412710] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Downloading stream optimized image 414dd827-3fec-484e-897f-aa53cdec2e35 to [datastore1] OSTACK_IMG_e28e0743-fcd9-42b0-b97d-406c39a5d183/OSTACK_IMG_e28e0743-fcd9-42b0-b97d-406c39a5d183.vmdk on the data store datastore1 as vApp {{(pid=62522) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1351.412860] env[62522]: DEBUG nova.virt.vmwareapi.images [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Downloading image file data 414dd827-3fec-484e-897f-aa53cdec2e35 to the ESX as VM named 'OSTACK_IMG_e28e0743-fcd9-42b0-b97d-406c39a5d183' {{(pid=62522) fetch_image_stream_optimized 
/opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1351.484927] env[62522]: DEBUG oslo_vmware.rw_handles [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1351.484927] env[62522]: value = "resgroup-9" [ 1351.484927] env[62522]: _type = "ResourcePool" [ 1351.484927] env[62522]: }. {{(pid=62522) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1351.485265] env[62522]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-cd930df5-6974-47b7-8f96-6205d44a79f1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.505472] env[62522]: DEBUG oslo_vmware.rw_handles [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lease: (returnval){ [ 1351.505472] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52371953-9a90-0827-549d-8a05fdad8894" [ 1351.505472] env[62522]: _type = "HttpNfcLease" [ 1351.505472] env[62522]: } obtained for vApp import into resource pool (val){ [ 1351.505472] env[62522]: value = "resgroup-9" [ 1351.505472] env[62522]: _type = "ResourcePool" [ 1351.505472] env[62522]: }. {{(pid=62522) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1351.505854] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the lease: (returnval){ [ 1351.505854] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52371953-9a90-0827-549d-8a05fdad8894" [ 1351.505854] env[62522]: _type = "HttpNfcLease" [ 1351.505854] env[62522]: } to be ready. {{(pid=62522) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1351.511840] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1351.511840] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52371953-9a90-0827-549d-8a05fdad8894" [ 1351.511840] env[62522]: _type = "HttpNfcLease" [ 1351.511840] env[62522]: } is initializing. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1351.730571] env[62522]: DEBUG oslo_concurrency.lockutils [req-ce563947-658f-43cf-846f-bebe40336b5e req-bba12dfe-0acf-411d-8598-56dd652a8591 service nova] Releasing lock "refresh_cache-da11bae6-484b-455e-9462-6f5143d2a9a9" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1351.730899] env[62522]: DEBUG nova.compute.manager [req-ce563947-658f-43cf-846f-bebe40336b5e req-bba12dfe-0acf-411d-8598-56dd652a8591 service nova] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Received event network-changed-74c8cccc-0aa8-4147-9172-cbb2cbfcb35f {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1351.731115] env[62522]: DEBUG nova.compute.manager [req-ce563947-658f-43cf-846f-bebe40336b5e req-bba12dfe-0acf-411d-8598-56dd652a8591 service nova] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Refreshing instance network info cache due to event network-changed-74c8cccc-0aa8-4147-9172-cbb2cbfcb35f. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1351.731357] env[62522]: DEBUG oslo_concurrency.lockutils [req-ce563947-658f-43cf-846f-bebe40336b5e req-bba12dfe-0acf-411d-8598-56dd652a8591 service nova] Acquiring lock "refresh_cache-c95f697b-0d68-489d-bfc4-9d129eab1be2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1351.731514] env[62522]: DEBUG oslo_concurrency.lockutils [req-ce563947-658f-43cf-846f-bebe40336b5e req-bba12dfe-0acf-411d-8598-56dd652a8591 service nova] Acquired lock "refresh_cache-c95f697b-0d68-489d-bfc4-9d129eab1be2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1351.731704] env[62522]: DEBUG nova.network.neutron [req-ce563947-658f-43cf-846f-bebe40336b5e req-bba12dfe-0acf-411d-8598-56dd652a8591 service nova] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Refreshing network info cache for port 74c8cccc-0aa8-4147-9172-cbb2cbfcb35f {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1352.013702] env[62522]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1352.013702] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52371953-9a90-0827-549d-8a05fdad8894" [ 1352.013702] env[62522]: _type = "HttpNfcLease" [ 1352.013702] env[62522]: } is ready. {{(pid=62522) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1352.014224] env[62522]: DEBUG oslo_vmware.rw_handles [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1352.014224] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52371953-9a90-0827-549d-8a05fdad8894" [ 1352.014224] env[62522]: _type = "HttpNfcLease" [ 1352.014224] env[62522]: }. {{(pid=62522) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1352.014778] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd62d16-7006-47d5-a006-8114fb58ff97 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.021860] env[62522]: DEBUG oslo_vmware.rw_handles [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52685d7c-efef-ef26-5246-4b7963890346/disk-0.vmdk from lease info. {{(pid=62522) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1352.022051] env[62522]: DEBUG oslo_vmware.rw_handles [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Creating HTTP connection to write to file with size = 31663616 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52685d7c-efef-ef26-5246-4b7963890346/disk-0.vmdk. 
{{(pid=62522) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1352.087078] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3e4c0458-f567-44d5-a782-d0d88277e61a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.241951] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1352.246544] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1352.246707] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Starting heal instance info cache {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1352.246809] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Rebuilding the list of instances to heal {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 1352.456462] env[62522]: DEBUG nova.network.neutron [req-ce563947-658f-43cf-846f-bebe40336b5e req-bba12dfe-0acf-411d-8598-56dd652a8591 service nova] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Updated VIF entry in instance network info cache for port 74c8cccc-0aa8-4147-9172-cbb2cbfcb35f. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1352.456856] env[62522]: DEBUG nova.network.neutron [req-ce563947-658f-43cf-846f-bebe40336b5e req-bba12dfe-0acf-411d-8598-56dd652a8591 service nova] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Updating instance_info_cache with network_info: [{"id": "74c8cccc-0aa8-4147-9172-cbb2cbfcb35f", "address": "fa:16:3e:88:4a:7a", "network": {"id": "fd993a8b-571c-4873-a5d6-4d8c60e23d38", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2084093882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071dd4c295a54e388099d5bf0f4e300b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74c8cccc-0a", "ovs_interfaceid": "74c8cccc-0aa8-4147-9172-cbb2cbfcb35f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1352.783663] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock 
"refresh_cache-b31195c2-29f4-475c-baa7-fcb4791b7278" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1352.783821] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquired lock "refresh_cache-b31195c2-29f4-475c-baa7-fcb4791b7278" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1352.783975] env[62522]: DEBUG nova.network.neutron [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Forcefully refreshing network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1352.784157] env[62522]: DEBUG nova.objects.instance [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lazy-loading 'info_cache' on Instance uuid b31195c2-29f4-475c-baa7-fcb4791b7278 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1352.960663] env[62522]: DEBUG oslo_concurrency.lockutils [req-ce563947-658f-43cf-846f-bebe40336b5e req-bba12dfe-0acf-411d-8598-56dd652a8591 service nova] Releasing lock "refresh_cache-c95f697b-0d68-489d-bfc4-9d129eab1be2" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1353.215550] env[62522]: DEBUG oslo_vmware.rw_handles [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Completed reading data from the image iterator. {{(pid=62522) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1353.216045] env[62522]: DEBUG oslo_vmware.rw_handles [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52685d7c-efef-ef26-5246-4b7963890346/disk-0.vmdk. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1353.216781] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfaf1303-576c-4506-a272-c63bee1e070f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.223615] env[62522]: DEBUG oslo_vmware.rw_handles [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52685d7c-efef-ef26-5246-4b7963890346/disk-0.vmdk is in state: ready. {{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1353.223796] env[62522]: DEBUG oslo_vmware.rw_handles [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52685d7c-efef-ef26-5246-4b7963890346/disk-0.vmdk. 
{{(pid=62522) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1353.224060] env[62522]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-6f18d11c-7818-4315-938b-35086a55b74a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.441103] env[62522]: DEBUG oslo_vmware.rw_handles [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52685d7c-efef-ef26-5246-4b7963890346/disk-0.vmdk. {{(pid=62522) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1353.441405] env[62522]: INFO nova.virt.vmwareapi.images [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Downloaded image file data 414dd827-3fec-484e-897f-aa53cdec2e35 [ 1353.442273] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7198c28-ee59-42fb-9ffb-66db47328045 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.458209] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-39bd63d9-92aa-4867-90e8-b48d9d4b8889 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.482138] env[62522]: INFO nova.virt.vmwareapi.images [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] The imported VM was unregistered [ 1353.484434] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Caching image {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1353.484669] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Creating directory with path [datastore1] devstack-image-cache_base/414dd827-3fec-484e-897f-aa53cdec2e35 {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1353.484960] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d718b9ec-4975-431c-8013-a686a3a5057d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.517241] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Created directory with path [datastore1] devstack-image-cache_base/414dd827-3fec-484e-897f-aa53cdec2e35 {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1353.517412] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] 
Moving virtual disk from [datastore1] OSTACK_IMG_e28e0743-fcd9-42b0-b97d-406c39a5d183/OSTACK_IMG_e28e0743-fcd9-42b0-b97d-406c39a5d183.vmdk to [datastore1] devstack-image-cache_base/414dd827-3fec-484e-897f-aa53cdec2e35/414dd827-3fec-484e-897f-aa53cdec2e35.vmdk. {{(pid=62522) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1353.517663] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-bd125248-9b11-46df-85ed-a01d1e6b3cbb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.524231] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1353.524231] env[62522]: value = "task-2416534" [ 1353.524231] env[62522]: _type = "Task" [ 1353.524231] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.531420] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416534, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.035100] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416534, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.535303] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416534, 'name': MoveVirtualDisk_Task} progress is 9%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.770514] env[62522]: DEBUG nova.network.neutron [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Updating instance_info_cache with network_info: [{"id": "58444651-b47b-44d5-b240-53949c79df86", "address": "fa:16:3e:6c:81:68", "network": {"id": "fd993a8b-571c-4873-a5d6-4d8c60e23d38", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2084093882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071dd4c295a54e388099d5bf0f4e300b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58444651-b4", "ovs_interfaceid": "58444651-b47b-44d5-b240-53949c79df86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1355.036222] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416534, 'name': MoveVirtualDisk_Task} progress is 26%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.273873] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Releasing lock "refresh_cache-b31195c2-29f4-475c-baa7-fcb4791b7278" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1355.274120] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Updated the network info_cache for instance {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 1355.274348] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1355.274513] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1355.274661] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1355.274830] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1355.275037] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1355.275186] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62522) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1355.275346] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1355.536487] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416534, 'name': MoveVirtualDisk_Task} progress is 49%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.778652] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1355.778938] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1355.779094] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1355.779261] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62522) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1355.780199] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a5850f5-778d-46b4-abf4-4d4cf8cc2f3d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.788743] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb89d0de-3521-47f6-a57f-6273a66d4c3c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.805033] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c87dac17-0bef-4af2-a069-4f3c6acabc73 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.812085] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082f8339-4cc4-493e-bd7d-49eb7764a948 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.841846] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179929MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=62522) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1355.842048] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1355.842233] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1356.037494] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416534, 'name': MoveVirtualDisk_Task} progress is 71%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.538397] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416534, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.870870] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance b31195c2-29f4-475c-baa7-fcb4791b7278 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1356.871091] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 27f4b976-7dff-49b0-9b00-7515cb976e72 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1356.871250] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance ecc70761-8f69-48f6-8e81-7d2ba3728c70 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1356.871373] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance fd9af7c3-358e-417f-97f4-fd2d67d21300 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1356.871490] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance c95f697b-0d68-489d-bfc4-9d129eab1be2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1356.871604] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance da11bae6-484b-455e-9462-6f5143d2a9a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1356.871792] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1356.871929] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1356.955042] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c89d8f4d-c728-4713-baa5-485f5831aa7c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.962435] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eea0d0e-3e50-478a-9597-940c0164be5f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.992853] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65579dad-acc9-476f-90e0-f3301f237bf9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.000215] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57de6f60-8145-431b-8b22-58e406983cc6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.013163] env[62522]: DEBUG nova.compute.provider_tree [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1357.037215] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416534, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.324558} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.037703] env[62522]: INFO nova.virt.vmwareapi.ds_util [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_e28e0743-fcd9-42b0-b97d-406c39a5d183/OSTACK_IMG_e28e0743-fcd9-42b0-b97d-406c39a5d183.vmdk to [datastore1] devstack-image-cache_base/414dd827-3fec-484e-897f-aa53cdec2e35/414dd827-3fec-484e-897f-aa53cdec2e35.vmdk. 
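The records above trace the VMware driver's image-cache path for this unshelve: the stream-optimized image is imported as a vApp over an HttpNfcLease, the temporary VM is unregistered, a directory is created under devstack-image-cache_base, and a MoveVirtualDisk_Task is polled ("progress is N%") until the VMDK lands in the cache, before the cached disk is copied into the instance folder in the records that follow. As a rough, hedged sketch of how such a vCenter task is typically created and polled through oslo.vmware (this is illustrative, not Nova's actual code; the endpoint, credentials, inventory path, and datastore paths are placeholders):

    from oslo_vmware import api as vmware_api

    # Placeholder endpoint and credentials; a reachable vCenter is required.
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Placeholder lookup of the datacenter that owns datastore1.
    dc_ref = session.invoke_api(
        session.vim, 'FindByInventoryPath',
        session.vim.service_content.searchIndex,
        inventoryPath='dc-placeholder')

    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'MoveVirtualDisk_Task', disk_mgr,
        sourceName='[datastore1] OSTACK_IMG_tmp/OSTACK_IMG_tmp.vmdk',
        sourceDatacenter=dc_ref,
        destName='[datastore1] devstack-image-cache_base/img/img.vmdk',
        destDatacenter=dc_ref,
        force=False)

    # wait_for_task() polls the task object, which is what produces the
    # "MoveVirtualDisk_Task ... progress is N%" records above, and raises
    # if vCenter reports the task in an error state.
    session.wait_for_task(task)
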
[ 1357.037891] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Cleaning up location [datastore1] OSTACK_IMG_e28e0743-fcd9-42b0-b97d-406c39a5d183 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1357.038064] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_e28e0743-fcd9-42b0-b97d-406c39a5d183 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1357.038299] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eba89d92-d893-42c0-b761-5dab8e69089d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.044210] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1357.044210] env[62522]: value = "task-2416536" [ 1357.044210] env[62522]: _type = "Task" [ 1357.044210] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.051484] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416536, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.516388] env[62522]: DEBUG nova.scheduler.client.report [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1357.554562] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416536, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034337} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.554914] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1357.555060] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Releasing lock "[datastore1] devstack-image-cache_base/414dd827-3fec-484e-897f-aa53cdec2e35/414dd827-3fec-484e-897f-aa53cdec2e35.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1357.555353] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/414dd827-3fec-484e-897f-aa53cdec2e35/414dd827-3fec-484e-897f-aa53cdec2e35.vmdk to [datastore1] da11bae6-484b-455e-9462-6f5143d2a9a9/da11bae6-484b-455e-9462-6f5143d2a9a9.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1357.555597] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-48664f5d-c583-4745-876b-65059ac2b47a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.562083] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1357.562083] env[62522]: value = "task-2416538" [ 1357.562083] env[62522]: _type = "Task" [ 1357.562083] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.570319] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416538, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.021082] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62522) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1358.021082] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.179s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1358.072420] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416538, 'name': CopyVirtualDisk_Task} progress is 21%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.575373] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416538, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.074638] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416538, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.576114] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416538, 'name': CopyVirtualDisk_Task} progress is 88%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.077735] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416538, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.232719} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.078069] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/414dd827-3fec-484e-897f-aa53cdec2e35/414dd827-3fec-484e-897f-aa53cdec2e35.vmdk to [datastore1] da11bae6-484b-455e-9462-6f5143d2a9a9/da11bae6-484b-455e-9462-6f5143d2a9a9.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1360.078938] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e51dd7e9-a835-4ba8-bde1-a6e4aed740a7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.100998] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] da11bae6-484b-455e-9462-6f5143d2a9a9/da11bae6-484b-455e-9462-6f5143d2a9a9.vmdk or device None with type streamOptimized {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1360.101376] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e290944-8c9f-4a70-9d70-b77f0d7cb3bf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.120759] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1360.120759] env[62522]: value = "task-2416540" [ 1360.120759] env[62522]: _type = "Task" [ 1360.120759] 
env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.128218] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416540, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.630768] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416540, 'name': ReconfigVM_Task, 'duration_secs': 0.294034} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.631162] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Reconfigured VM instance instance-00000071 to attach disk [datastore1] da11bae6-484b-455e-9462-6f5143d2a9a9/da11bae6-484b-455e-9462-6f5143d2a9a9.vmdk or device None with type streamOptimized {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1360.631685] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2b3fa952-16ff-4835-8cb4-b4deee0c99c9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.638227] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1360.638227] env[62522]: value = "task-2416541" [ 1360.638227] env[62522]: _type = "Task" [ 1360.638227] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.645119] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416541, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.148098] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416541, 'name': Rename_Task, 'duration_secs': 0.153044} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.148401] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1361.148647] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e7622880-bbb7-4ee1-b692-440c96262b76 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.157782] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1361.157782] env[62522]: value = "task-2416542" [ 1361.157782] env[62522]: _type = "Task" [ 1361.157782] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.165710] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416542, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.667818] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416542, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.169650] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416542, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.668979] env[62522]: DEBUG oslo_vmware.api [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416542, 'name': PowerOnVM_Task, 'duration_secs': 1.049991} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.669301] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1362.769820] env[62522]: DEBUG nova.compute.manager [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1362.770819] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fcc142d-dcd8-4660-a57d-e120324048d5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.288842] env[62522]: DEBUG oslo_concurrency.lockutils [None req-6b51be14-82e7-47d8-b606-50ad4f35e5a6 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "da11bae6-484b-455e-9462-6f5143d2a9a9" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 20.604s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1363.816304] env[62522]: DEBUG oslo_concurrency.lockutils [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "da11bae6-484b-455e-9462-6f5143d2a9a9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1363.816711] env[62522]: DEBUG oslo_concurrency.lockutils [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "da11bae6-484b-455e-9462-6f5143d2a9a9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1363.816840] env[62522]: DEBUG oslo_concurrency.lockutils [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "da11bae6-484b-455e-9462-6f5143d2a9a9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1363.816984] env[62522]: DEBUG oslo_concurrency.lockutils [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "da11bae6-484b-455e-9462-6f5143d2a9a9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1363.817179] env[62522]: DEBUG oslo_concurrency.lockutils [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 
tempest-ServerActionsTestOtherB-610600767-project-member] Lock "da11bae6-484b-455e-9462-6f5143d2a9a9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1363.819380] env[62522]: INFO nova.compute.manager [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Terminating instance [ 1364.322880] env[62522]: DEBUG nova.compute.manager [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1364.323172] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1364.324105] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1afb22f4-a8ea-4980-9f08-b0dd059cba16 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.333389] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1364.333613] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f724fb2-7a57-4d87-ab4f-74fd20b398c2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.339896] env[62522]: DEBUG oslo_vmware.api [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1364.339896] env[62522]: value = "task-2416545" [ 1364.339896] env[62522]: _type = "Task" [ 1364.339896] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.347438] env[62522]: DEBUG oslo_vmware.api [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416545, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.849801] env[62522]: DEBUG oslo_vmware.api [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416545, 'name': PowerOffVM_Task, 'duration_secs': 0.181808} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.850160] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1364.850883] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1364.851183] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f1c04d9-9181-4049-8376-11cc1a21f865 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.914662] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1364.914855] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1364.915108] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Deleting the datastore file [datastore1] da11bae6-484b-455e-9462-6f5143d2a9a9 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1364.915374] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-528cf750-9460-4364-838d-631c239821fc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.922692] env[62522]: DEBUG oslo_vmware.api [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for the task: (returnval){ [ 1364.922692] env[62522]: value = "task-2416547" [ 1364.922692] env[62522]: _type = "Task" [ 1364.922692] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.929868] env[62522]: DEBUG oslo_vmware.api [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416547, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.433352] env[62522]: DEBUG oslo_vmware.api [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Task: {'id': task-2416547, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.124116} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.433545] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1365.433727] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1365.433903] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1365.434089] env[62522]: INFO nova.compute.manager [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1365.434337] env[62522]: DEBUG oslo.service.loopingcall [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1365.434528] env[62522]: DEBUG nova.compute.manager [-] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1365.434624] env[62522]: DEBUG nova.network.neutron [-] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1365.899082] env[62522]: DEBUG nova.compute.manager [req-0ae1d110-6ff1-44c4-9d54-db0e196b28b1 req-06d74df9-d05a-4f5a-9b71-081c9b8cfbe2 service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Received event network-vif-deleted-d2a62d4f-3bdc-4367-8694-9ba47bdfd799 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1365.899330] env[62522]: INFO nova.compute.manager [req-0ae1d110-6ff1-44c4-9d54-db0e196b28b1 req-06d74df9-d05a-4f5a-9b71-081c9b8cfbe2 service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Neutron deleted interface d2a62d4f-3bdc-4367-8694-9ba47bdfd799; detaching it from the instance and deleting it from the info cache [ 1365.899443] env[62522]: DEBUG nova.network.neutron [req-0ae1d110-6ff1-44c4-9d54-db0e196b28b1 req-06d74df9-d05a-4f5a-9b71-081c9b8cfbe2 service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1366.383136] env[62522]: DEBUG nova.network.neutron [-] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1366.401695] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7f54c7a6-3dfb-4ce8-bd9b-2dd383c4acb8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.411349] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d618c8e0-570b-4c93-abca-75fa2c340e47 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.437724] env[62522]: DEBUG nova.compute.manager [req-0ae1d110-6ff1-44c4-9d54-db0e196b28b1 req-06d74df9-d05a-4f5a-9b71-081c9b8cfbe2 service nova] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Detach interface failed, port_id=d2a62d4f-3bdc-4367-8694-9ba47bdfd799, reason: Instance da11bae6-484b-455e-9462-6f5143d2a9a9 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1366.886884] env[62522]: INFO nova.compute.manager [-] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Took 1.45 seconds to deallocate network for instance. 
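[editor's note] The records above trace the VMware driver's teardown of instance da11bae6-484b-455e-9462-6f5143d2a9a9: power off, UnregisterVM, a FileManager.DeleteDatastoreFile_Task polled to completion, then Neutron port deallocation. The sketch below is not Nova's code path; it is a minimal illustration of the same task-and-poll pattern using the public oslo.vmware session API. VC_HOST, VC_USER, VC_PASS, the managed-object reference values and the datastore path are placeholders, not values taken from this log.

    # Minimal sketch (assumptions noted above) of the power-off / unregister /
    # datastore-delete sequence seen in the log, using oslo.vmware directly.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'VC_HOST', 'VC_USER', 'VC_PASS',
        api_retry_count=10, task_poll_interval=0.5)

    # Placeholder morefs; in Nova these come from the instance and datastore lookups.
    vm_ref = vim_util.get_moref('VM_MOREF_VALUE', 'VirtualMachine')
    dc_ref = vim_util.get_moref('DC_MOREF_VALUE', 'Datacenter')

    # PowerOffVM_Task returns a task object; wait_for_task polls it until done,
    # which is what produces the "progress is N%" entries above.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # UnregisterVM removes the VM from the vCenter inventory without touching
    # its files on the datastore.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Deleting the backing files is a separate FileManager task, again polled
    # until completion (the DeleteDatastoreFile_Task records above).
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager,
                              name='[datastore1] DS_PATH',
                              datacenter=dc_ref)
    session.wait_for_task(task)

wait_for_task drives the same periodic _poll_task loop that is logged above, so each call blocks until the vCenter task reports success or raises on error. [end editor's note]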
[ 1367.393888] env[62522]: DEBUG oslo_concurrency.lockutils [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1367.394373] env[62522]: DEBUG oslo_concurrency.lockutils [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1367.394420] env[62522]: DEBUG nova.objects.instance [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lazy-loading 'resources' on Instance uuid da11bae6-484b-455e-9462-6f5143d2a9a9 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1367.974170] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c92bd3f-7b68-486e-8784-c9031b637668 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.983467] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd7b429-116c-4f11-ae04-0e9bda94cb71 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.013089] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad68900-549a-4c6e-99fa-0a7da17a4a29 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.019773] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-032668fa-1ae6-4da6-9c35-6115792f9d32 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.032350] env[62522]: DEBUG nova.compute.provider_tree [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1368.535217] env[62522]: DEBUG nova.scheduler.client.report [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1369.040332] env[62522]: DEBUG oslo_concurrency.lockutils [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 
tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.645s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1369.060815] env[62522]: INFO nova.scheduler.client.report [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Deleted allocations for instance da11bae6-484b-455e-9462-6f5143d2a9a9 [ 1369.568515] env[62522]: DEBUG oslo_concurrency.lockutils [None req-94bbdf5d-32b5-4fa5-b18c-2d2ddccf2698 tempest-ServerActionsTestOtherB-610600767 tempest-ServerActionsTestOtherB-610600767-project-member] Lock "da11bae6-484b-455e-9462-6f5143d2a9a9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.752s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1369.782737] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "75ba1afc-3586-4bb0-ae7f-ebf5a794f068" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1369.782981] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "75ba1afc-3586-4bb0-ae7f-ebf5a794f068" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.000104] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1e769005-f19c-4c71-8887-e272f168b422 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "ecc70761-8f69-48f6-8e81-7d2ba3728c70" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1370.000349] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1e769005-f19c-4c71-8887-e272f168b422 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "ecc70761-8f69-48f6-8e81-7d2ba3728c70" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.285385] env[62522]: DEBUG nova.compute.manager [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1370.503279] env[62522]: DEBUG nova.compute.utils [None req-1e769005-f19c-4c71-8887-e272f168b422 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1370.809877] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1370.810183] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.812169] env[62522]: INFO nova.compute.claims [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1371.006343] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1e769005-f19c-4c71-8887-e272f168b422 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "ecc70761-8f69-48f6-8e81-7d2ba3728c70" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1371.839610] env[62522]: DEBUG nova.scheduler.client.report [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Refreshing inventories for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1371.852084] env[62522]: DEBUG nova.scheduler.client.report [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Updating ProviderTree inventory for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1371.852295] env[62522]: DEBUG nova.compute.provider_tree [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Updating inventory in ProviderTree for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 
16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1371.861836] env[62522]: DEBUG nova.scheduler.client.report [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Refreshing aggregate associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, aggregates: None {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1371.877934] env[62522]: DEBUG nova.scheduler.client.report [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Refreshing trait associations for resource provider c7fa38b2-245d-4337-a012-22c1a01c0a72, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=62522) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1371.944291] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-392f65a1-cc50-4209-9328-28da034d573b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.952138] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6706347a-fb6b-4540-a84d-c10cdefb6c35 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.982827] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe309dd9-ed0d-416b-97e5-7c3f3d8a3303 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.989786] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea52fbd-a67d-4fc1-bf6b-4a32f3ce12aa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.002651] env[62522]: DEBUG nova.compute.provider_tree [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1372.062403] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1e769005-f19c-4c71-8887-e272f168b422 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "ecc70761-8f69-48f6-8e81-7d2ba3728c70" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1372.062647] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1e769005-f19c-4c71-8887-e272f168b422 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "ecc70761-8f69-48f6-8e81-7d2ba3728c70" acquired by 
"nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1372.062875] env[62522]: INFO nova.compute.manager [None req-1e769005-f19c-4c71-8887-e272f168b422 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Attaching volume 51b1f1b3-8aeb-43ee-9480-eebe285a140a to /dev/sdb [ 1372.092247] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f807260-f20b-4dbe-adc7-9294cbfc896f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.099518] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-137698fb-ce5c-4764-b481-95c9da957599 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.112653] env[62522]: DEBUG nova.virt.block_device [None req-1e769005-f19c-4c71-8887-e272f168b422 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Updating existing volume attachment record: 35f6e135-5890-479f-9e6e-dfadaf4da445 {{(pid=62522) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1372.506905] env[62522]: DEBUG nova.scheduler.client.report [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1373.012584] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.202s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1373.013053] env[62522]: DEBUG nova.compute.manager [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1373.518314] env[62522]: DEBUG nova.compute.utils [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1373.519689] env[62522]: DEBUG nova.compute.manager [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1373.519860] env[62522]: DEBUG nova.network.neutron [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1373.592412] env[62522]: DEBUG nova.policy [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3a4ba3a3d3a34495b7a7e0618577d60f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '071dd4c295a54e388099d5bf0f4e300b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1374.023984] env[62522]: DEBUG nova.compute.manager [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1374.085350] env[62522]: DEBUG nova.network.neutron [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Successfully created port: 5a9957c5-133c-4c1b-8469-dbd152ccd386 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1374.530101] env[62522]: INFO nova.virt.block_device [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Booting with volume cb4b8a20-0671-45d5-bf05-a9de0e808c88 at /dev/sda [ 1374.565964] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e30bb9f4-c46f-411e-a5bf-8ffdbb9bb288 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.577947] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3001843-6c11-42bb-a04d-56d553ee352a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.606761] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b81b0fdc-2d97-459f-b86d-51455174349d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.615621] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eff7feb6-6ff2-4f65-a5c0-82f59a1cc7b0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.642919] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261a3301-8e9c-4251-be49-fdf4500ac351 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.649419] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e733a72-1eb2-4b14-ad39-b43aa864a699 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.663211] env[62522]: DEBUG nova.virt.block_device [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Updating existing volume attachment record: a6b3108e-ae6d-48e1-95fa-49be0b77b6f8 {{(pid=62522) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1375.743618] env[62522]: DEBUG nova.compute.manager [req-83bdc6b0-b29f-46f1-9123-26522aed13fe req-1b1f24a6-e492-44ba-885e-a42a8be18176 service nova] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Received event network-vif-plugged-5a9957c5-133c-4c1b-8469-dbd152ccd386 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1375.743857] env[62522]: DEBUG oslo_concurrency.lockutils [req-83bdc6b0-b29f-46f1-9123-26522aed13fe req-1b1f24a6-e492-44ba-885e-a42a8be18176 service nova] Acquiring lock "75ba1afc-3586-4bb0-ae7f-ebf5a794f068-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1375.743988] env[62522]: DEBUG oslo_concurrency.lockutils [req-83bdc6b0-b29f-46f1-9123-26522aed13fe req-1b1f24a6-e492-44ba-885e-a42a8be18176 service nova] Lock "75ba1afc-3586-4bb0-ae7f-ebf5a794f068-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1375.744068] env[62522]: DEBUG oslo_concurrency.lockutils [req-83bdc6b0-b29f-46f1-9123-26522aed13fe req-1b1f24a6-e492-44ba-885e-a42a8be18176 service nova] Lock "75ba1afc-3586-4bb0-ae7f-ebf5a794f068-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1375.745217] env[62522]: DEBUG nova.compute.manager [req-83bdc6b0-b29f-46f1-9123-26522aed13fe req-1b1f24a6-e492-44ba-885e-a42a8be18176 service nova] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] No waiting events found dispatching network-vif-plugged-5a9957c5-133c-4c1b-8469-dbd152ccd386 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1375.745217] env[62522]: WARNING nova.compute.manager [req-83bdc6b0-b29f-46f1-9123-26522aed13fe req-1b1f24a6-e492-44ba-885e-a42a8be18176 service nova] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Received unexpected event network-vif-plugged-5a9957c5-133c-4c1b-8469-dbd152ccd386 for instance with vm_state building and task_state block_device_mapping. [ 1375.944784] env[62522]: DEBUG nova.network.neutron [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Successfully updated port: 5a9957c5-133c-4c1b-8469-dbd152ccd386 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1376.450712] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "refresh_cache-75ba1afc-3586-4bb0-ae7f-ebf5a794f068" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1376.450712] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired lock "refresh_cache-75ba1afc-3586-4bb0-ae7f-ebf5a794f068" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.450712] env[62522]: DEBUG nova.network.neutron [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1376.655251] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e769005-f19c-4c71-8887-e272f168b422 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Volume attach. 
Driver type: vmdk {{(pid=62522) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1376.655529] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e769005-f19c-4c71-8887-e272f168b422 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489883', 'volume_id': '51b1f1b3-8aeb-43ee-9480-eebe285a140a', 'name': 'volume-51b1f1b3-8aeb-43ee-9480-eebe285a140a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ecc70761-8f69-48f6-8e81-7d2ba3728c70', 'attached_at': '', 'detached_at': '', 'volume_id': '51b1f1b3-8aeb-43ee-9480-eebe285a140a', 'serial': '51b1f1b3-8aeb-43ee-9480-eebe285a140a'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1376.656466] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09bca61a-96f2-4442-92e4-bd021aeee087 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.673875] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73c18b0f-7598-4ae8-8e29-42128fd92838 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.698280] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e769005-f19c-4c71-8887-e272f168b422 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] volume-51b1f1b3-8aeb-43ee-9480-eebe285a140a/volume-51b1f1b3-8aeb-43ee-9480-eebe285a140a.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1376.698574] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b47faaf4-f18f-4d70-b453-3d15496736f9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.717282] env[62522]: DEBUG oslo_vmware.api [None req-1e769005-f19c-4c71-8887-e272f168b422 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1376.717282] env[62522]: value = "task-2416552" [ 1376.717282] env[62522]: _type = "Task" [ 1376.717282] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.725637] env[62522]: DEBUG oslo_vmware.api [None req-1e769005-f19c-4c71-8887-e272f168b422 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416552, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.871791] env[62522]: DEBUG nova.compute.manager [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1376.872369] env[62522]: DEBUG nova.virt.hardware [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1376.872598] env[62522]: DEBUG nova.virt.hardware [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1376.872756] env[62522]: DEBUG nova.virt.hardware [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1376.872945] env[62522]: DEBUG nova.virt.hardware [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1376.873161] env[62522]: DEBUG nova.virt.hardware [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1376.873381] env[62522]: DEBUG nova.virt.hardware [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1376.873618] env[62522]: DEBUG nova.virt.hardware [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1376.873695] env[62522]: DEBUG nova.virt.hardware [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1376.873874] env[62522]: DEBUG nova.virt.hardware [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Got 1 possible topologies {{(pid=62522) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1376.874038] env[62522]: DEBUG nova.virt.hardware [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1376.874217] env[62522]: DEBUG nova.virt.hardware [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1376.875265] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f0a3ef1-595b-4cb1-871d-e887773a554d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.884653] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c1c57f8-6d11-4034-bce6-0f5f600ecfaf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.991798] env[62522]: DEBUG nova.network.neutron [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1377.205025] env[62522]: DEBUG nova.network.neutron [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Updating instance_info_cache with network_info: [{"id": "5a9957c5-133c-4c1b-8469-dbd152ccd386", "address": "fa:16:3e:52:7c:d9", "network": {"id": "fd993a8b-571c-4873-a5d6-4d8c60e23d38", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2084093882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071dd4c295a54e388099d5bf0f4e300b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a9957c5-13", "ovs_interfaceid": "5a9957c5-133c-4c1b-8469-dbd152ccd386", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1377.227789] env[62522]: DEBUG oslo_vmware.api [None req-1e769005-f19c-4c71-8887-e272f168b422 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416552, 'name': ReconfigVM_Task, 'duration_secs': 0.408347} 
completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.228770] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e769005-f19c-4c71-8887-e272f168b422 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Reconfigured VM instance instance-00000072 to attach disk [datastore1] volume-51b1f1b3-8aeb-43ee-9480-eebe285a140a/volume-51b1f1b3-8aeb-43ee-9480-eebe285a140a.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1377.234150] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-570da2b7-9969-4a10-875d-d7a1ac82538e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.256364] env[62522]: DEBUG oslo_vmware.api [None req-1e769005-f19c-4c71-8887-e272f168b422 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1377.256364] env[62522]: value = "task-2416553" [ 1377.256364] env[62522]: _type = "Task" [ 1377.256364] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.262479] env[62522]: DEBUG oslo_vmware.api [None req-1e769005-f19c-4c71-8887-e272f168b422 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416553, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.708079] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Releasing lock "refresh_cache-75ba1afc-3586-4bb0-ae7f-ebf5a794f068" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1377.708428] env[62522]: DEBUG nova.compute.manager [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Instance network_info: |[{"id": "5a9957c5-133c-4c1b-8469-dbd152ccd386", "address": "fa:16:3e:52:7c:d9", "network": {"id": "fd993a8b-571c-4873-a5d6-4d8c60e23d38", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2084093882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071dd4c295a54e388099d5bf0f4e300b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a9957c5-13", "ovs_interfaceid": "5a9957c5-133c-4c1b-8469-dbd152ccd386", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1377.708852] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:7c:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd7b5f1ef-d4b9-4ec3-b047-17e4cb349d25', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5a9957c5-133c-4c1b-8469-dbd152ccd386', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1377.717076] env[62522]: DEBUG oslo.service.loopingcall [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1377.717321] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1377.717561] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-96f8c4ca-899d-4017-93a1-e15d9a584146 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.739025] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1377.739025] env[62522]: value = "task-2416554" [ 1377.739025] env[62522]: _type = "Task" [ 1377.739025] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.747774] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416554, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.761381] env[62522]: DEBUG oslo_vmware.api [None req-1e769005-f19c-4c71-8887-e272f168b422 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416553, 'name': ReconfigVM_Task, 'duration_secs': 0.179492} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.761381] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e769005-f19c-4c71-8887-e272f168b422 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489883', 'volume_id': '51b1f1b3-8aeb-43ee-9480-eebe285a140a', 'name': 'volume-51b1f1b3-8aeb-43ee-9480-eebe285a140a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ecc70761-8f69-48f6-8e81-7d2ba3728c70', 'attached_at': '', 'detached_at': '', 'volume_id': '51b1f1b3-8aeb-43ee-9480-eebe285a140a', 'serial': '51b1f1b3-8aeb-43ee-9480-eebe285a140a'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1377.780487] env[62522]: DEBUG nova.compute.manager [req-50c41fb6-6be7-4f3f-8c29-51b6ffc607f2 req-92b02cbe-867b-4cc0-819d-552165ca61b4 service nova] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Received event network-changed-5a9957c5-133c-4c1b-8469-dbd152ccd386 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1377.780634] env[62522]: DEBUG nova.compute.manager [req-50c41fb6-6be7-4f3f-8c29-51b6ffc607f2 req-92b02cbe-867b-4cc0-819d-552165ca61b4 service nova] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Refreshing instance network info cache due to event network-changed-5a9957c5-133c-4c1b-8469-dbd152ccd386. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1377.780858] env[62522]: DEBUG oslo_concurrency.lockutils [req-50c41fb6-6be7-4f3f-8c29-51b6ffc607f2 req-92b02cbe-867b-4cc0-819d-552165ca61b4 service nova] Acquiring lock "refresh_cache-75ba1afc-3586-4bb0-ae7f-ebf5a794f068" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1377.780999] env[62522]: DEBUG oslo_concurrency.lockutils [req-50c41fb6-6be7-4f3f-8c29-51b6ffc607f2 req-92b02cbe-867b-4cc0-819d-552165ca61b4 service nova] Acquired lock "refresh_cache-75ba1afc-3586-4bb0-ae7f-ebf5a794f068" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1377.781183] env[62522]: DEBUG nova.network.neutron [req-50c41fb6-6be7-4f3f-8c29-51b6ffc607f2 req-92b02cbe-867b-4cc0-819d-552165ca61b4 service nova] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Refreshing network info cache for port 5a9957c5-133c-4c1b-8469-dbd152ccd386 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1378.254375] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416554, 'name': CreateVM_Task, 'duration_secs': 0.324577} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.254643] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1378.255437] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sda', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489882', 'volume_id': 'cb4b8a20-0671-45d5-bf05-a9de0e808c88', 'name': 'volume-cb4b8a20-0671-45d5-bf05-a9de0e808c88', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '75ba1afc-3586-4bb0-ae7f-ebf5a794f068', 'attached_at': '', 'detached_at': '', 'volume_id': 'cb4b8a20-0671-45d5-bf05-a9de0e808c88', 'serial': 'cb4b8a20-0671-45d5-bf05-a9de0e808c88'}, 'attachment_id': 'a6b3108e-ae6d-48e1-95fa-49be0b77b6f8', 'delete_on_termination': True, 'guest_format': None, 'device_type': None, 'boot_index': 0, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=62522) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1378.255565] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Root volume attach. Driver type: vmdk {{(pid=62522) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1378.256664] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c40fcd9c-8499-4013-b54f-5efc35ff3ca0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.268205] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a28d608-8549-43a5-a423-4675867cfa84 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.275868] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78978290-7946-48d5-94b4-4d9837b9ce03 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.288276] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-9657614a-4353-4d5e-8d7e-a7314ea3915e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.297955] env[62522]: DEBUG oslo_vmware.api [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1378.297955] env[62522]: value = "task-2416555" [ 1378.297955] env[62522]: _type = "Task" [ 1378.297955] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.307868] env[62522]: DEBUG oslo_vmware.api [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416555, 'name': RelocateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.562462] env[62522]: DEBUG nova.network.neutron [req-50c41fb6-6be7-4f3f-8c29-51b6ffc607f2 req-92b02cbe-867b-4cc0-819d-552165ca61b4 service nova] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Updated VIF entry in instance network info cache for port 5a9957c5-133c-4c1b-8469-dbd152ccd386. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1378.562993] env[62522]: DEBUG nova.network.neutron [req-50c41fb6-6be7-4f3f-8c29-51b6ffc607f2 req-92b02cbe-867b-4cc0-819d-552165ca61b4 service nova] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Updating instance_info_cache with network_info: [{"id": "5a9957c5-133c-4c1b-8469-dbd152ccd386", "address": "fa:16:3e:52:7c:d9", "network": {"id": "fd993a8b-571c-4873-a5d6-4d8c60e23d38", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2084093882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071dd4c295a54e388099d5bf0f4e300b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a9957c5-13", "ovs_interfaceid": "5a9957c5-133c-4c1b-8469-dbd152ccd386", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1378.801740] env[62522]: DEBUG nova.objects.instance [None req-1e769005-f19c-4c71-8887-e272f168b422 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lazy-loading 'flavor' on Instance uuid ecc70761-8f69-48f6-8e81-7d2ba3728c70 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1378.811818] env[62522]: DEBUG oslo_vmware.api [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416555, 'name': RelocateVM_Task} progress is 43%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.066115] env[62522]: DEBUG oslo_concurrency.lockutils [req-50c41fb6-6be7-4f3f-8c29-51b6ffc607f2 req-92b02cbe-867b-4cc0-819d-552165ca61b4 service nova] Releasing lock "refresh_cache-75ba1afc-3586-4bb0-ae7f-ebf5a794f068" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1379.160014] env[62522]: DEBUG oslo_concurrency.lockutils [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Acquiring lock "c464ae64-056f-4629-add9-2ff7a1971ebb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.160302] env[62522]: DEBUG oslo_concurrency.lockutils [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Lock "c464ae64-056f-4629-add9-2ff7a1971ebb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1379.310379] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1e769005-f19c-4c71-8887-e272f168b422 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "ecc70761-8f69-48f6-8e81-7d2ba3728c70" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.248s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.317355] env[62522]: DEBUG oslo_vmware.api [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416555, 'name': RelocateVM_Task} progress is 56%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.627262] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0382c045-d633-4793-9bb9-081f6ba7dc38 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "ecc70761-8f69-48f6-8e81-7d2ba3728c70" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.627262] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0382c045-d633-4793-9bb9-081f6ba7dc38 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "ecc70761-8f69-48f6-8e81-7d2ba3728c70" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1379.664573] env[62522]: DEBUG nova.compute.manager [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1379.814611] env[62522]: DEBUG oslo_vmware.api [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416555, 'name': RelocateVM_Task} progress is 71%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.130051] env[62522]: INFO nova.compute.manager [None req-0382c045-d633-4793-9bb9-081f6ba7dc38 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Detaching volume 51b1f1b3-8aeb-43ee-9480-eebe285a140a [ 1380.172913] env[62522]: INFO nova.virt.block_device [None req-0382c045-d633-4793-9bb9-081f6ba7dc38 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Attempting to driver detach volume 51b1f1b3-8aeb-43ee-9480-eebe285a140a from mountpoint /dev/sdb [ 1380.173369] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0382c045-d633-4793-9bb9-081f6ba7dc38 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Volume detach. Driver type: vmdk {{(pid=62522) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1380.173576] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0382c045-d633-4793-9bb9-081f6ba7dc38 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489883', 'volume_id': '51b1f1b3-8aeb-43ee-9480-eebe285a140a', 'name': 'volume-51b1f1b3-8aeb-43ee-9480-eebe285a140a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ecc70761-8f69-48f6-8e81-7d2ba3728c70', 'attached_at': '', 'detached_at': '', 'volume_id': '51b1f1b3-8aeb-43ee-9480-eebe285a140a', 'serial': '51b1f1b3-8aeb-43ee-9480-eebe285a140a'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1380.174534] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f50e98-09f7-4a86-8852-f11cc0e00c4c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.200636] env[62522]: DEBUG oslo_concurrency.lockutils [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.200924] env[62522]: DEBUG oslo_concurrency.lockutils [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.202425] env[62522]: INFO nova.compute.claims 
[None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1380.205395] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f042912-bcc5-4b96-b588-7a76befd8eae {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.213813] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88198eb-ad65-4d71-8dfe-710d3ab41cc3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.237216] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d2ece0f-1a69-4139-943c-6e6a199c6247 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.253797] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0382c045-d633-4793-9bb9-081f6ba7dc38 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] The volume has not been displaced from its original location: [datastore1] volume-51b1f1b3-8aeb-43ee-9480-eebe285a140a/volume-51b1f1b3-8aeb-43ee-9480-eebe285a140a.vmdk. No consolidation needed. {{(pid=62522) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1380.259581] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0382c045-d633-4793-9bb9-081f6ba7dc38 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Reconfiguring VM instance instance-00000072 to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1380.260367] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31d291d9-0130-4929-9502-aa136ae38e54 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.280538] env[62522]: DEBUG oslo_vmware.api [None req-0382c045-d633-4793-9bb9-081f6ba7dc38 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1380.280538] env[62522]: value = "task-2416556" [ 1380.280538] env[62522]: _type = "Task" [ 1380.280538] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.289199] env[62522]: DEBUG oslo_vmware.api [None req-0382c045-d633-4793-9bb9-081f6ba7dc38 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416556, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.312938] env[62522]: DEBUG oslo_vmware.api [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416555, 'name': RelocateVM_Task} progress is 86%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.791329] env[62522]: DEBUG oslo_vmware.api [None req-0382c045-d633-4793-9bb9-081f6ba7dc38 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416556, 'name': ReconfigVM_Task, 'duration_secs': 0.376632} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.791580] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0382c045-d633-4793-9bb9-081f6ba7dc38 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Reconfigured VM instance instance-00000072 to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1380.796261] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31b6610c-402c-465c-811c-4242a96ac4e8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.815593] env[62522]: DEBUG oslo_vmware.api [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416555, 'name': RelocateVM_Task} progress is 97%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.817407] env[62522]: DEBUG oslo_vmware.api [None req-0382c045-d633-4793-9bb9-081f6ba7dc38 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1380.817407] env[62522]: value = "task-2416557" [ 1380.817407] env[62522]: _type = "Task" [ 1380.817407] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.824213] env[62522]: DEBUG oslo_vmware.api [None req-0382c045-d633-4793-9bb9-081f6ba7dc38 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416557, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.305660] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3646132a-2b0d-4034-b468-71c9e85c3b95 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.317943] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2e04595-3d8b-4a99-a47d-9a38f09694a8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.320976] env[62522]: DEBUG oslo_vmware.api [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416555, 'name': RelocateVM_Task, 'duration_secs': 2.937925} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.321331] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Volume attach. Driver type: vmdk {{(pid=62522) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1381.321535] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489882', 'volume_id': 'cb4b8a20-0671-45d5-bf05-a9de0e808c88', 'name': 'volume-cb4b8a20-0671-45d5-bf05-a9de0e808c88', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '75ba1afc-3586-4bb0-ae7f-ebf5a794f068', 'attached_at': '', 'detached_at': '', 'volume_id': 'cb4b8a20-0671-45d5-bf05-a9de0e808c88', 'serial': 'cb4b8a20-0671-45d5-bf05-a9de0e808c88'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1381.325206] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d7178a-5a92-4438-a96a-7d90850665c8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.351496] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ad9215-0dde-4407-a627-ef8bd988bb2e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.365994] env[62522]: DEBUG oslo_vmware.api [None req-0382c045-d633-4793-9bb9-081f6ba7dc38 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416557, 'name': ReconfigVM_Task, 'duration_secs': 0.137367} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.366730] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-0382c045-d633-4793-9bb9-081f6ba7dc38 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489883', 'volume_id': '51b1f1b3-8aeb-43ee-9480-eebe285a140a', 'name': 'volume-51b1f1b3-8aeb-43ee-9480-eebe285a140a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ecc70761-8f69-48f6-8e81-7d2ba3728c70', 'attached_at': '', 'detached_at': '', 'volume_id': '51b1f1b3-8aeb-43ee-9480-eebe285a140a', 'serial': '51b1f1b3-8aeb-43ee-9480-eebe285a140a'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1381.369107] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0966e91b-169f-47dd-9a13-467a4ab1377b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.374263] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43ad2a62-02ca-4643-bdeb-ee7a94ea0240 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.396240] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] volume-cb4b8a20-0671-45d5-bf05-a9de0e808c88/volume-cb4b8a20-0671-45d5-bf05-a9de0e808c88.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1381.396858] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1315f6c1-5c3c-4a73-8355-0005a936277c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.418179] env[62522]: DEBUG nova.compute.provider_tree [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1381.424653] env[62522]: DEBUG oslo_vmware.api [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1381.424653] env[62522]: value = "task-2416558" [ 1381.424653] env[62522]: _type = "Task" [ 1381.424653] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.432801] env[62522]: DEBUG oslo_vmware.api [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416558, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.909069] env[62522]: DEBUG nova.objects.instance [None req-0382c045-d633-4793-9bb9-081f6ba7dc38 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lazy-loading 'flavor' on Instance uuid ecc70761-8f69-48f6-8e81-7d2ba3728c70 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1381.921030] env[62522]: DEBUG nova.scheduler.client.report [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1381.935044] env[62522]: DEBUG oslo_vmware.api [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416558, 'name': ReconfigVM_Task, 'duration_secs': 0.238453} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.935328] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Reconfigured VM instance instance-00000075 to attach disk [datastore1] volume-cb4b8a20-0671-45d5-bf05-a9de0e808c88/volume-cb4b8a20-0671-45d5-bf05-a9de0e808c88.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1381.940329] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2d7ef441-0378-455e-91b5-492bf08bf69c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.955775] env[62522]: DEBUG oslo_vmware.api [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1381.955775] env[62522]: value = "task-2416559" [ 1381.955775] env[62522]: _type = "Task" [ 1381.955775] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.963595] env[62522]: DEBUG oslo_vmware.api [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416559, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.425530] env[62522]: DEBUG oslo_concurrency.lockutils [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.224s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1382.426064] env[62522]: DEBUG nova.compute.manager [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1382.466766] env[62522]: DEBUG oslo_vmware.api [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416559, 'name': ReconfigVM_Task, 'duration_secs': 0.123966} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.467070] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489882', 'volume_id': 'cb4b8a20-0671-45d5-bf05-a9de0e808c88', 'name': 'volume-cb4b8a20-0671-45d5-bf05-a9de0e808c88', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '75ba1afc-3586-4bb0-ae7f-ebf5a794f068', 'attached_at': '', 'detached_at': '', 'volume_id': 'cb4b8a20-0671-45d5-bf05-a9de0e808c88', 'serial': 'cb4b8a20-0671-45d5-bf05-a9de0e808c88'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1382.467842] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-55fbeb9f-ac11-4f85-b1f2-f607741e19b9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.474758] env[62522]: DEBUG oslo_vmware.api [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1382.474758] env[62522]: value = "task-2416560" [ 1382.474758] env[62522]: _type = "Task" [ 1382.474758] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.482281] env[62522]: DEBUG oslo_vmware.api [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416560, 'name': Rename_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.917516] env[62522]: DEBUG oslo_concurrency.lockutils [None req-0382c045-d633-4793-9bb9-081f6ba7dc38 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "ecc70761-8f69-48f6-8e81-7d2ba3728c70" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.291s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1382.930478] env[62522]: DEBUG nova.compute.utils [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1382.932228] env[62522]: DEBUG nova.compute.manager [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1382.932356] env[62522]: DEBUG nova.network.neutron [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1382.980102] env[62522]: DEBUG nova.policy [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7e5662aa9485461c8f9e0522e7aac4d6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '125731fd2e354ee2b791909db235592b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1382.988518] env[62522]: DEBUG oslo_vmware.api [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416560, 'name': Rename_Task, 'duration_secs': 0.128499} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.988789] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1382.989041] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-001a42cc-fc5f-486d-a94a-0dd8309e8aa8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.996479] env[62522]: DEBUG oslo_vmware.api [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1382.996479] env[62522]: value = "task-2416561" [ 1382.996479] env[62522]: _type = "Task" [ 1382.996479] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.003688] env[62522]: DEBUG oslo_vmware.api [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416561, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.018573] env[62522]: INFO nova.compute.manager [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Rebuilding instance [ 1383.064553] env[62522]: DEBUG nova.compute.manager [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1383.065445] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f3da04-6153-48c3-9dd3-dd965c0ccb57 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.220140] env[62522]: DEBUG nova.network.neutron [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Successfully created port: 69af92f4-f409-4098-b0e5-8490fc594da1 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1383.435654] env[62522]: DEBUG nova.compute.manager [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1383.506993] env[62522]: DEBUG oslo_vmware.api [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416561, 'name': PowerOnVM_Task, 'duration_secs': 0.496939} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.507324] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1383.507561] env[62522]: INFO nova.compute.manager [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Took 6.64 seconds to spawn the instance on the hypervisor. [ 1383.507751] env[62522]: DEBUG nova.compute.manager [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1383.508526] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd37f60-52ac-4ac0-bdd6-14af9d1b9075 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.901819] env[62522]: DEBUG oslo_concurrency.lockutils [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "ecc70761-8f69-48f6-8e81-7d2ba3728c70" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.902164] env[62522]: DEBUG oslo_concurrency.lockutils [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "ecc70761-8f69-48f6-8e81-7d2ba3728c70" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.902428] env[62522]: DEBUG oslo_concurrency.lockutils [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "ecc70761-8f69-48f6-8e81-7d2ba3728c70-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.902632] env[62522]: DEBUG oslo_concurrency.lockutils [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "ecc70761-8f69-48f6-8e81-7d2ba3728c70-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.902808] env[62522]: DEBUG oslo_concurrency.lockutils [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "ecc70761-8f69-48f6-8e81-7d2ba3728c70-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.905451] env[62522]: INFO nova.compute.manager [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Terminating instance [ 1384.025279] env[62522]: INFO nova.compute.manager [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Took 13.23 seconds to build instance. [ 1384.078858] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1384.079183] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-43a7a171-4113-4a78-9c22-258524d707c6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.086700] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1384.086700] env[62522]: value = "task-2416562" [ 1384.086700] env[62522]: _type = "Task" [ 1384.086700] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.094309] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416562, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.413017] env[62522]: DEBUG nova.compute.manager [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1384.413017] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1384.413017] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ab5796-657a-425e-b69f-2483499582d3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.422201] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1384.422201] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bcdf0480-658f-4e0a-a268-ae0ee2fc5864 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.428047] env[62522]: DEBUG oslo_vmware.api [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1384.428047] env[62522]: value = "task-2416563" [ 1384.428047] env[62522]: _type = "Task" [ 1384.428047] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.436689] env[62522]: DEBUG oslo_vmware.api [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416563, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.444895] env[62522]: DEBUG nova.compute.manager [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1384.480396] env[62522]: DEBUG nova.virt.hardware [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1384.480670] env[62522]: DEBUG nova.virt.hardware [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1384.480832] env[62522]: DEBUG nova.virt.hardware [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1384.481023] env[62522]: DEBUG nova.virt.hardware [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1384.481288] env[62522]: DEBUG nova.virt.hardware [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1384.481388] env[62522]: DEBUG nova.virt.hardware [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1384.481539] env[62522]: DEBUG nova.virt.hardware [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1384.481743] env[62522]: DEBUG nova.virt.hardware [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1384.481925] env[62522]: DEBUG 
nova.virt.hardware [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1384.482105] env[62522]: DEBUG nova.virt.hardware [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1384.482289] env[62522]: DEBUG nova.virt.hardware [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1384.483184] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-774e0966-8ba9-4d51-bcdc-53e63801519d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.494705] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a055ab-74f9-4c44-b9ca-8982f3742c6a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.528757] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ddfadb67-5639-446a-ae37-e10fa157e617 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "75ba1afc-3586-4bb0-ae7f-ebf5a794f068" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.745s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1384.598493] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416562, 'name': PowerOffVM_Task, 'duration_secs': 0.206444} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.598785] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1384.599024] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1384.599810] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44419fcf-b460-4c5d-97b7-9553be244b0f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.608718] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1384.608998] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-37b3568d-c433-48ac-9c6a-e66e22125ba5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.677713] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1384.678073] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1384.679159] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Deleting the datastore file [datastore2] fd9af7c3-358e-417f-97f4-fd2d67d21300 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1384.682254] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-47127645-4224-48d2-b1b3-0c7c6d6713ba {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.685810] env[62522]: DEBUG nova.compute.manager [req-78e02188-8eb5-4646-af0a-0a972e8069e5 req-d7be8f09-c7ea-44ee-b02d-ec1a2af33c84 service nova] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Received event network-changed-58444651-b47b-44d5-b240-53949c79df86 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1384.686263] env[62522]: DEBUG nova.compute.manager [req-78e02188-8eb5-4646-af0a-0a972e8069e5 
req-d7be8f09-c7ea-44ee-b02d-ec1a2af33c84 service nova] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Refreshing instance network info cache due to event network-changed-58444651-b47b-44d5-b240-53949c79df86. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1384.686459] env[62522]: DEBUG oslo_concurrency.lockutils [req-78e02188-8eb5-4646-af0a-0a972e8069e5 req-d7be8f09-c7ea-44ee-b02d-ec1a2af33c84 service nova] Acquiring lock "refresh_cache-b31195c2-29f4-475c-baa7-fcb4791b7278" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1384.686703] env[62522]: DEBUG oslo_concurrency.lockutils [req-78e02188-8eb5-4646-af0a-0a972e8069e5 req-d7be8f09-c7ea-44ee-b02d-ec1a2af33c84 service nova] Acquired lock "refresh_cache-b31195c2-29f4-475c-baa7-fcb4791b7278" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1384.686958] env[62522]: DEBUG nova.network.neutron [req-78e02188-8eb5-4646-af0a-0a972e8069e5 req-d7be8f09-c7ea-44ee-b02d-ec1a2af33c84 service nova] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Refreshing network info cache for port 58444651-b47b-44d5-b240-53949c79df86 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1384.695228] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1384.695228] env[62522]: value = "task-2416565" [ 1384.695228] env[62522]: _type = "Task" [ 1384.695228] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.713896] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416565, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.938358] env[62522]: DEBUG oslo_vmware.api [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416563, 'name': PowerOffVM_Task, 'duration_secs': 0.234257} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.938358] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1384.939186] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1384.939624] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-feed3c88-3fd6-4809-a28d-a289f2c6b019 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.006022] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1385.006022] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1385.006022] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Deleting the datastore file [datastore2] ecc70761-8f69-48f6-8e81-7d2ba3728c70 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1385.006022] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ae8aaa5a-4fc0-4735-9c65-fe02a74f871c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.012159] env[62522]: DEBUG oslo_vmware.api [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1385.012159] env[62522]: value = "task-2416567" [ 1385.012159] env[62522]: _type = "Task" [ 1385.012159] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.025756] env[62522]: DEBUG oslo_vmware.api [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416567, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.031016] env[62522]: DEBUG nova.network.neutron [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Successfully updated port: 69af92f4-f409-4098-b0e5-8490fc594da1 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1385.205471] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416565, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14011} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.205920] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1385.205920] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1385.206514] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1385.399661] env[62522]: DEBUG nova.network.neutron [req-78e02188-8eb5-4646-af0a-0a972e8069e5 req-d7be8f09-c7ea-44ee-b02d-ec1a2af33c84 service nova] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Updated VIF entry in instance network info cache for port 58444651-b47b-44d5-b240-53949c79df86. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1385.400058] env[62522]: DEBUG nova.network.neutron [req-78e02188-8eb5-4646-af0a-0a972e8069e5 req-d7be8f09-c7ea-44ee-b02d-ec1a2af33c84 service nova] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Updating instance_info_cache with network_info: [{"id": "58444651-b47b-44d5-b240-53949c79df86", "address": "fa:16:3e:6c:81:68", "network": {"id": "fd993a8b-571c-4873-a5d6-4d8c60e23d38", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2084093882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071dd4c295a54e388099d5bf0f4e300b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58444651-b4", "ovs_interfaceid": "58444651-b47b-44d5-b240-53949c79df86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1385.524548] env[62522]: DEBUG oslo_vmware.api [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416567, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147519} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.524830] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1385.525031] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1385.525236] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1385.525427] env[62522]: INFO nova.compute.manager [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 1385.525674] env[62522]: DEBUG oslo.service.loopingcall [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1385.525866] env[62522]: DEBUG nova.compute.manager [-] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1385.525969] env[62522]: DEBUG nova.network.neutron [-] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1385.536359] env[62522]: DEBUG oslo_concurrency.lockutils [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Acquiring lock "refresh_cache-c464ae64-056f-4629-add9-2ff7a1971ebb" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1385.536502] env[62522]: DEBUG oslo_concurrency.lockutils [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Acquired lock "refresh_cache-c464ae64-056f-4629-add9-2ff7a1971ebb" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1385.536745] env[62522]: DEBUG nova.network.neutron [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1385.903696] env[62522]: DEBUG oslo_concurrency.lockutils [req-78e02188-8eb5-4646-af0a-0a972e8069e5 req-d7be8f09-c7ea-44ee-b02d-ec1a2af33c84 service nova] Releasing lock "refresh_cache-b31195c2-29f4-475c-baa7-fcb4791b7278" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1386.074085] env[62522]: DEBUG nova.network.neutron [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1386.256896] env[62522]: DEBUG nova.virt.hardware [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1386.257232] env[62522]: DEBUG nova.virt.hardware [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1386.257483] env[62522]: DEBUG nova.virt.hardware [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1386.257936] env[62522]: DEBUG nova.virt.hardware [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1386.258175] env[62522]: DEBUG nova.virt.hardware [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1386.258424] env[62522]: DEBUG nova.virt.hardware [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1386.258900] env[62522]: DEBUG nova.virt.hardware [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1386.259111] env[62522]: DEBUG nova.virt.hardware [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1386.259367] env[62522]: DEBUG nova.virt.hardware [None 
req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1386.259551] env[62522]: DEBUG nova.virt.hardware [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1386.259799] env[62522]: DEBUG nova.virt.hardware [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1386.260871] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7945068c-139d-42aa-89ef-4d7c4b0f94a2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.266661] env[62522]: DEBUG nova.network.neutron [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Updating instance_info_cache with network_info: [{"id": "69af92f4-f409-4098-b0e5-8490fc594da1", "address": "fa:16:3e:f4:4d:fd", "network": {"id": "696372d3-0040-435b-9358-8f209aa08aff", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1721544615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "125731fd2e354ee2b791909db235592b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac4015e0-e5e7-4b3f-8d8e-ef4501eea9aa", "external-id": "nsx-vlan-transportzone-132", "segmentation_id": 132, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69af92f4-f4", "ovs_interfaceid": "69af92f4-f409-4098-b0e5-8490fc594da1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1386.270962] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a95227-5be0-4ee6-a082-b051baab642b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.286879] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:10:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f625f389-b7cf-49b9-998a-87f3a9e3f234', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': '2c6f1904-1976-45eb-9380-5262c08450d2', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1386.295237] env[62522]: DEBUG oslo.service.loopingcall [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1386.296228] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1386.296504] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7ab32d00-da7c-4c2f-b4a8-6f2a8792f0d7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.318225] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1386.318225] env[62522]: value = "task-2416568" [ 1386.318225] env[62522]: _type = "Task" [ 1386.318225] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.327633] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416568, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.384103] env[62522]: DEBUG nova.compute.manager [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Stashing vm_state: active {{(pid=62522) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1386.514844] env[62522]: DEBUG nova.network.neutron [-] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1386.736765] env[62522]: DEBUG nova.compute.manager [req-7d2f9ca9-7d2b-4fc7-a580-bffba905538d req-cece7613-25af-4370-a217-2604ec8a268b service nova] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Received event network-changed-5a9957c5-133c-4c1b-8469-dbd152ccd386 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1386.737063] env[62522]: DEBUG nova.compute.manager [req-7d2f9ca9-7d2b-4fc7-a580-bffba905538d req-cece7613-25af-4370-a217-2604ec8a268b service nova] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Refreshing instance network info cache due to event network-changed-5a9957c5-133c-4c1b-8469-dbd152ccd386. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1386.737187] env[62522]: DEBUG oslo_concurrency.lockutils [req-7d2f9ca9-7d2b-4fc7-a580-bffba905538d req-cece7613-25af-4370-a217-2604ec8a268b service nova] Acquiring lock "refresh_cache-75ba1afc-3586-4bb0-ae7f-ebf5a794f068" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1386.737334] env[62522]: DEBUG oslo_concurrency.lockutils [req-7d2f9ca9-7d2b-4fc7-a580-bffba905538d req-cece7613-25af-4370-a217-2604ec8a268b service nova] Acquired lock "refresh_cache-75ba1afc-3586-4bb0-ae7f-ebf5a794f068" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1386.737495] env[62522]: DEBUG nova.network.neutron [req-7d2f9ca9-7d2b-4fc7-a580-bffba905538d req-cece7613-25af-4370-a217-2604ec8a268b service nova] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Refreshing network info cache for port 5a9957c5-133c-4c1b-8469-dbd152ccd386 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1386.776901] env[62522]: DEBUG oslo_concurrency.lockutils [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Releasing lock "refresh_cache-c464ae64-056f-4629-add9-2ff7a1971ebb" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1386.777149] env[62522]: DEBUG nova.compute.manager [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Instance network_info: |[{"id": "69af92f4-f409-4098-b0e5-8490fc594da1", "address": "fa:16:3e:f4:4d:fd", "network": {"id": "696372d3-0040-435b-9358-8f209aa08aff", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1721544615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "125731fd2e354ee2b791909db235592b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac4015e0-e5e7-4b3f-8d8e-ef4501eea9aa", "external-id": "nsx-vlan-transportzone-132", "segmentation_id": 132, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69af92f4-f4", "ovs_interfaceid": "69af92f4-f409-4098-b0e5-8490fc594da1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1386.777592] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:4d:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac4015e0-e5e7-4b3f-8d8e-ef4501eea9aa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'69af92f4-f409-4098-b0e5-8490fc594da1', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1386.785130] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Creating folder: Project (125731fd2e354ee2b791909db235592b). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1386.785662] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2fa93acc-d742-4d68-b04a-9f14dc6369b8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.797813] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Created folder: Project (125731fd2e354ee2b791909db235592b) in parent group-v489562. [ 1386.797988] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Creating folder: Instances. Parent ref: group-v489886. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1386.798215] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf791bde-18b9-433f-9cfc-8d91ba7dacda {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.806653] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Created folder: Instances in parent group-v489886. [ 1386.806864] env[62522]: DEBUG oslo.service.loopingcall [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1386.807049] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1386.807239] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e30876b7-3456-44b6-ab7c-61dabfeea1ec {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.828348] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416568, 'name': CreateVM_Task, 'duration_secs': 0.328672} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.829331] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1386.829521] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1386.829521] env[62522]: value = "task-2416571" [ 1386.829521] env[62522]: _type = "Task" [ 1386.829521] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.830103] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1386.830268] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1386.830578] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1386.830826] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f72a8a6c-b3db-4e3b-ba60-279226479d22 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.837275] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1386.837275] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521a87ae-a3d1-f396-f7b0-a4c8dcdf5fe3" [ 1386.837275] env[62522]: _type = "Task" [ 1386.837275] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.840195] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416571, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.846837] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521a87ae-a3d1-f396-f7b0-a4c8dcdf5fe3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.906030] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.906322] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1387.018506] env[62522]: INFO nova.compute.manager [-] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Took 1.49 seconds to deallocate network for instance. [ 1387.340648] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416571, 'name': CreateVM_Task, 'duration_secs': 0.312147} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.345818] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1387.346526] env[62522]: DEBUG oslo_concurrency.lockutils [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.346700] env[62522]: DEBUG oslo_concurrency.lockutils [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.347013] env[62522]: DEBUG oslo_concurrency.lockutils [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1387.347833] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-726418c4-3633-40d0-91a3-e1ac698d4c44 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.353564] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521a87ae-a3d1-f396-f7b0-a4c8dcdf5fe3, 'name': SearchDatastore_Task, 'duration_secs': 0.024953} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.354820] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1387.355090] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1387.355462] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.359058] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.359058] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1387.359058] env[62522]: DEBUG oslo_vmware.api [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Waiting for the task: (returnval){ [ 1387.359058] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e2cba3-21e0-a895-596c-a4b1a28c0f90" [ 1387.359058] env[62522]: _type = "Task" [ 1387.359058] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.359058] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-678963eb-c598-470a-87cd-a02797cb81da {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.366055] env[62522]: DEBUG oslo_vmware.api [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e2cba3-21e0-a895-596c-a4b1a28c0f90, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.367239] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1387.367430] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1387.368161] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40353c55-5f77-4ea5-a5a7-250ff725fae0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.372917] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1387.372917] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]523f5c01-b08b-6d45-c897-756c5dbd1909" [ 1387.372917] env[62522]: _type = "Task" [ 1387.372917] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.380869] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]523f5c01-b08b-6d45-c897-756c5dbd1909, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.411735] env[62522]: INFO nova.compute.claims [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1387.524935] env[62522]: DEBUG oslo_concurrency.lockutils [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1387.529596] env[62522]: DEBUG nova.network.neutron [req-7d2f9ca9-7d2b-4fc7-a580-bffba905538d req-cece7613-25af-4370-a217-2604ec8a268b service nova] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Updated VIF entry in instance network info cache for port 5a9957c5-133c-4c1b-8469-dbd152ccd386. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1387.529935] env[62522]: DEBUG nova.network.neutron [req-7d2f9ca9-7d2b-4fc7-a580-bffba905538d req-cece7613-25af-4370-a217-2604ec8a268b service nova] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Updating instance_info_cache with network_info: [{"id": "5a9957c5-133c-4c1b-8469-dbd152ccd386", "address": "fa:16:3e:52:7c:d9", "network": {"id": "fd993a8b-571c-4873-a5d6-4d8c60e23d38", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2084093882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071dd4c295a54e388099d5bf0f4e300b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a9957c5-13", "ovs_interfaceid": "5a9957c5-133c-4c1b-8469-dbd152ccd386", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1387.868544] env[62522]: DEBUG oslo_vmware.api [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52e2cba3-21e0-a895-596c-a4b1a28c0f90, 'name': SearchDatastore_Task, 'duration_secs': 0.016343} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.868976] env[62522]: DEBUG oslo_concurrency.lockutils [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1387.869093] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1387.869330] env[62522]: DEBUG oslo_concurrency.lockutils [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.869479] env[62522]: DEBUG oslo_concurrency.lockutils [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.869655] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1387.869915] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3bee85d5-6b84-4ac2-b2ee-8d8de469e86f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.880795] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]523f5c01-b08b-6d45-c897-756c5dbd1909, 'name': SearchDatastore_Task, 'duration_secs': 0.01985} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.882207] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1387.882390] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1387.883043] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cae232c1-7fff-46c4-a44d-29592c2e59fa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.885099] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd5944b7-be75-4936-b567-9c71af40fd57 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.889585] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1387.889585] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fc07bd-fa3b-ef67-add8-833b651f0e3a" [ 1387.889585] env[62522]: _type = "Task" [ 1387.889585] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.891225] env[62522]: DEBUG oslo_vmware.api [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Waiting for the task: (returnval){ [ 1387.891225] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b4a036-1c76-1d9d-5305-7ccbc8c28d9a" [ 1387.891225] env[62522]: _type = "Task" [ 1387.891225] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.900535] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fc07bd-fa3b-ef67-add8-833b651f0e3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.903147] env[62522]: DEBUG oslo_vmware.api [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b4a036-1c76-1d9d-5305-7ccbc8c28d9a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.917726] env[62522]: INFO nova.compute.resource_tracker [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Updating resource usage from migration 2d33b26d-c265-410c-aac8-0ac47c5bd0e7 [ 1388.021041] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85533720-1ef9-4f69-a12d-1d5855798731 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.028545] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a37d32f-edb2-4eb8-9bc0-ab491e5042e4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.031969] env[62522]: DEBUG oslo_concurrency.lockutils [req-7d2f9ca9-7d2b-4fc7-a580-bffba905538d req-cece7613-25af-4370-a217-2604ec8a268b service nova] Releasing lock "refresh_cache-75ba1afc-3586-4bb0-ae7f-ebf5a794f068" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.032213] env[62522]: DEBUG nova.compute.manager [req-7d2f9ca9-7d2b-4fc7-a580-bffba905538d req-cece7613-25af-4370-a217-2604ec8a268b service nova] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Received event network-vif-plugged-69af92f4-f409-4098-b0e5-8490fc594da1 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1388.032406] env[62522]: DEBUG oslo_concurrency.lockutils [req-7d2f9ca9-7d2b-4fc7-a580-bffba905538d req-cece7613-25af-4370-a217-2604ec8a268b service nova] Acquiring lock "c464ae64-056f-4629-add9-2ff7a1971ebb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1388.032605] env[62522]: DEBUG oslo_concurrency.lockutils [req-7d2f9ca9-7d2b-4fc7-a580-bffba905538d req-cece7613-25af-4370-a217-2604ec8a268b service nova] Lock "c464ae64-056f-4629-add9-2ff7a1971ebb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1388.032766] env[62522]: DEBUG oslo_concurrency.lockutils [req-7d2f9ca9-7d2b-4fc7-a580-bffba905538d req-cece7613-25af-4370-a217-2604ec8a268b service nova] Lock "c464ae64-056f-4629-add9-2ff7a1971ebb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1388.032933] env[62522]: DEBUG nova.compute.manager [req-7d2f9ca9-7d2b-4fc7-a580-bffba905538d req-cece7613-25af-4370-a217-2604ec8a268b service nova] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] No waiting events found dispatching network-vif-plugged-69af92f4-f409-4098-b0e5-8490fc594da1 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1388.033127] env[62522]: WARNING nova.compute.manager [req-7d2f9ca9-7d2b-4fc7-a580-bffba905538d req-cece7613-25af-4370-a217-2604ec8a268b service nova] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Received unexpected event network-vif-plugged-69af92f4-f409-4098-b0e5-8490fc594da1 for 
instance with vm_state building and task_state spawning. [ 1388.033308] env[62522]: DEBUG nova.compute.manager [req-7d2f9ca9-7d2b-4fc7-a580-bffba905538d req-cece7613-25af-4370-a217-2604ec8a268b service nova] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Received event network-changed-69af92f4-f409-4098-b0e5-8490fc594da1 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1388.033461] env[62522]: DEBUG nova.compute.manager [req-7d2f9ca9-7d2b-4fc7-a580-bffba905538d req-cece7613-25af-4370-a217-2604ec8a268b service nova] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Refreshing instance network info cache due to event network-changed-69af92f4-f409-4098-b0e5-8490fc594da1. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1388.033666] env[62522]: DEBUG oslo_concurrency.lockutils [req-7d2f9ca9-7d2b-4fc7-a580-bffba905538d req-cece7613-25af-4370-a217-2604ec8a268b service nova] Acquiring lock "refresh_cache-c464ae64-056f-4629-add9-2ff7a1971ebb" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1388.033782] env[62522]: DEBUG oslo_concurrency.lockutils [req-7d2f9ca9-7d2b-4fc7-a580-bffba905538d req-cece7613-25af-4370-a217-2604ec8a268b service nova] Acquired lock "refresh_cache-c464ae64-056f-4629-add9-2ff7a1971ebb" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1388.033929] env[62522]: DEBUG nova.network.neutron [req-7d2f9ca9-7d2b-4fc7-a580-bffba905538d req-cece7613-25af-4370-a217-2604ec8a268b service nova] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Refreshing network info cache for port 69af92f4-f409-4098-b0e5-8490fc594da1 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1388.061461] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-846ada8b-ad04-4751-bf56-876cbca864d2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.069010] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79f8e0c8-34eb-4138-93cf-9d4ad70fb1ae {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.081930] env[62522]: DEBUG nova.compute.provider_tree [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1388.300431] env[62522]: DEBUG nova.network.neutron [req-7d2f9ca9-7d2b-4fc7-a580-bffba905538d req-cece7613-25af-4370-a217-2604ec8a268b service nova] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Updated VIF entry in instance network info cache for port 69af92f4-f409-4098-b0e5-8490fc594da1. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1388.300871] env[62522]: DEBUG nova.network.neutron [req-7d2f9ca9-7d2b-4fc7-a580-bffba905538d req-cece7613-25af-4370-a217-2604ec8a268b service nova] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Updating instance_info_cache with network_info: [{"id": "69af92f4-f409-4098-b0e5-8490fc594da1", "address": "fa:16:3e:f4:4d:fd", "network": {"id": "696372d3-0040-435b-9358-8f209aa08aff", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1721544615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "125731fd2e354ee2b791909db235592b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac4015e0-e5e7-4b3f-8d8e-ef4501eea9aa", "external-id": "nsx-vlan-transportzone-132", "segmentation_id": 132, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69af92f4-f4", "ovs_interfaceid": "69af92f4-f409-4098-b0e5-8490fc594da1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1388.402982] env[62522]: DEBUG oslo_vmware.api [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52b4a036-1c76-1d9d-5305-7ccbc8c28d9a, 'name': SearchDatastore_Task, 'duration_secs': 0.042117} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.406871] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52fc07bd-fa3b-ef67-add8-833b651f0e3a, 'name': SearchDatastore_Task, 'duration_secs': 0.013722} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.407089] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf991f97-264c-4929-bbd1-fadadfb4eca2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.409124] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.409382] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] fd9af7c3-358e-417f-97f4-fd2d67d21300/fd9af7c3-358e-417f-97f4-fd2d67d21300.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1388.409613] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f9ee19a-48b5-4f53-9a75-fe670e80fdd3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.414153] env[62522]: DEBUG oslo_vmware.api [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Waiting for the task: (returnval){ [ 1388.414153] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d6226c-41f0-cc3c-4022-10124ef77455" [ 1388.414153] env[62522]: _type = "Task" [ 1388.414153] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.417763] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1388.417763] env[62522]: value = "task-2416572" [ 1388.417763] env[62522]: _type = "Task" [ 1388.417763] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.423432] env[62522]: DEBUG oslo_vmware.api [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d6226c-41f0-cc3c-4022-10124ef77455, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.428297] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416572, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.584578] env[62522]: DEBUG nova.scheduler.client.report [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1388.804036] env[62522]: DEBUG oslo_concurrency.lockutils [req-7d2f9ca9-7d2b-4fc7-a580-bffba905538d req-cece7613-25af-4370-a217-2604ec8a268b service nova] Releasing lock "refresh_cache-c464ae64-056f-4629-add9-2ff7a1971ebb" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.804354] env[62522]: DEBUG nova.compute.manager [req-7d2f9ca9-7d2b-4fc7-a580-bffba905538d req-cece7613-25af-4370-a217-2604ec8a268b service nova] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Received event network-vif-deleted-0ae0220d-f10b-4927-b124-35afaa7bc701 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1388.924992] env[62522]: DEBUG oslo_vmware.api [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d6226c-41f0-cc3c-4022-10124ef77455, 'name': SearchDatastore_Task, 'duration_secs': 0.019893} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.925586] env[62522]: DEBUG oslo_concurrency.lockutils [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.925857] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] c464ae64-056f-4629-add9-2ff7a1971ebb/c464ae64-056f-4629-add9-2ff7a1971ebb.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1388.926151] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e7df7196-983e-490d-91cf-7b0467e89e3c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.930930] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416572, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485506} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.931443] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] fd9af7c3-358e-417f-97f4-fd2d67d21300/fd9af7c3-358e-417f-97f4-fd2d67d21300.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1388.931742] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1388.931972] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5ed1ac18-bf07-4b01-ba7c-ebaf9d451ec8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.935465] env[62522]: DEBUG oslo_vmware.api [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Waiting for the task: (returnval){ [ 1388.935465] env[62522]: value = "task-2416573" [ 1388.935465] env[62522]: _type = "Task" [ 1388.935465] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.939197] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1388.939197] env[62522]: value = "task-2416574" [ 1388.939197] env[62522]: _type = "Task" [ 1388.939197] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.945746] env[62522]: DEBUG oslo_vmware.api [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Task: {'id': task-2416573, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.951274] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416574, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.089018] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.183s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1389.089274] env[62522]: INFO nova.compute.manager [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Migrating [ 1389.095683] env[62522]: DEBUG oslo_concurrency.lockutils [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.571s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1389.095929] env[62522]: DEBUG nova.objects.instance [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lazy-loading 'resources' on Instance uuid ecc70761-8f69-48f6-8e81-7d2ba3728c70 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1389.224511] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5301b24e-5964-404d-a613-8ad2c00dd1ee {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.232921] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff073ca-5798-4a51-b0db-8ec67162e32c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.265769] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1a6de5f-e5f4-4932-ac67-bc9b68bbd5ed {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.274087] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f8d7abd-89f4-476c-bc06-70bdd5e0675a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.288459] env[62522]: DEBUG nova.compute.provider_tree [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1389.445756] env[62522]: DEBUG oslo_vmware.api [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Task: {'id': task-2416573, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.450959} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.446361] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] c464ae64-056f-4629-add9-2ff7a1971ebb/c464ae64-056f-4629-add9-2ff7a1971ebb.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1389.446589] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1389.446826] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-80df6f85-c53a-4eec-9dfd-c08b8cb1a508 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.450869] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416574, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063865} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.451407] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1389.452126] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85c1d171-7d3e-4a01-adb4-5b218c62a6e6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.455467] env[62522]: DEBUG oslo_vmware.api [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Waiting for the task: (returnval){ [ 1389.455467] env[62522]: value = "task-2416575" [ 1389.455467] env[62522]: _type = "Task" [ 1389.455467] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.476366] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] fd9af7c3-358e-417f-97f4-fd2d67d21300/fd9af7c3-358e-417f-97f4-fd2d67d21300.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1389.477166] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ffe89500-4b53-4728-a7b6-765036b4cb5b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.494085] env[62522]: DEBUG oslo_vmware.api [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Task: {'id': task-2416575, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.498577] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1389.498577] env[62522]: value = "task-2416576" [ 1389.498577] env[62522]: _type = "Task" [ 1389.498577] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.505911] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416576, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.608326] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "refresh_cache-75ba1afc-3586-4bb0-ae7f-ebf5a794f068" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1389.608525] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired lock "refresh_cache-75ba1afc-3586-4bb0-ae7f-ebf5a794f068" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1389.608701] env[62522]: DEBUG nova.network.neutron [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1389.791646] env[62522]: DEBUG nova.scheduler.client.report [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1389.964577] env[62522]: DEBUG oslo_vmware.api [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Task: {'id': task-2416575, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063665} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.965079] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1389.965825] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-480aa29e-7acf-4959-b6e2-f43acf687edc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.986821] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] c464ae64-056f-4629-add9-2ff7a1971ebb/c464ae64-056f-4629-add9-2ff7a1971ebb.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1389.987056] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce46b732-92cf-4505-9482-dd5330cf5100 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.008063] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416576, 'name': ReconfigVM_Task, 'duration_secs': 0.28048} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.009180] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Reconfigured VM instance instance-00000073 to attach disk [datastore2] fd9af7c3-358e-417f-97f4-fd2d67d21300/fd9af7c3-358e-417f-97f4-fd2d67d21300.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1390.009821] env[62522]: DEBUG oslo_vmware.api [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Waiting for the task: (returnval){ [ 1390.009821] env[62522]: value = "task-2416577" [ 1390.009821] env[62522]: _type = "Task" [ 1390.009821] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.010012] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ede3732a-27ca-48a2-907f-1f5956d1a466 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.020275] env[62522]: DEBUG oslo_vmware.api [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Task: {'id': task-2416577, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.021371] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1390.021371] env[62522]: value = "task-2416578" [ 1390.021371] env[62522]: _type = "Task" [ 1390.021371] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.031150] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416578, 'name': Rename_Task} progress is 6%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.296332] env[62522]: DEBUG oslo_concurrency.lockutils [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.201s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1390.319958] env[62522]: INFO nova.scheduler.client.report [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Deleted allocations for instance ecc70761-8f69-48f6-8e81-7d2ba3728c70 [ 1390.336227] env[62522]: DEBUG nova.network.neutron [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Updating instance_info_cache with network_info: [{"id": "5a9957c5-133c-4c1b-8469-dbd152ccd386", "address": "fa:16:3e:52:7c:d9", "network": {"id": "fd993a8b-571c-4873-a5d6-4d8c60e23d38", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2084093882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071dd4c295a54e388099d5bf0f4e300b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a9957c5-13", "ovs_interfaceid": "5a9957c5-133c-4c1b-8469-dbd152ccd386", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1390.521370] env[62522]: DEBUG oslo_vmware.api [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Task: {'id': task-2416577, 'name': ReconfigVM_Task, 'duration_secs': 
0.303688} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.521693] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Reconfigured VM instance instance-00000076 to attach disk [datastore1] c464ae64-056f-4629-add9-2ff7a1971ebb/c464ae64-056f-4629-add9-2ff7a1971ebb.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1390.522391] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-316e878c-c5b3-4f88-84fb-91431f82d1db {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.531638] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416578, 'name': Rename_Task, 'duration_secs': 0.13822} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.532795] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1390.533112] env[62522]: DEBUG oslo_vmware.api [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Waiting for the task: (returnval){ [ 1390.533112] env[62522]: value = "task-2416579" [ 1390.533112] env[62522]: _type = "Task" [ 1390.533112] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.533285] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4ad703d9-58a3-4286-83f6-73ae5ffccb5f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.542016] env[62522]: DEBUG oslo_vmware.api [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Task: {'id': task-2416579, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.543147] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1390.543147] env[62522]: value = "task-2416580" [ 1390.543147] env[62522]: _type = "Task" [ 1390.543147] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.550713] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416580, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.828839] env[62522]: DEBUG oslo_concurrency.lockutils [None req-811bc3cd-1be7-48f9-ab2f-ab661e7cd027 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "ecc70761-8f69-48f6-8e81-7d2ba3728c70" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.927s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1390.838553] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Releasing lock "refresh_cache-75ba1afc-3586-4bb0-ae7f-ebf5a794f068" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1391.044312] env[62522]: DEBUG oslo_vmware.api [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Task: {'id': task-2416579, 'name': Rename_Task, 'duration_secs': 0.209177} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.047388] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1391.047721] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9493fd76-125e-4cb5-a460-8e151c8f8a80 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.055175] env[62522]: DEBUG oslo_vmware.api [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416580, 'name': PowerOnVM_Task, 'duration_secs': 0.49546} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.056404] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1391.056599] env[62522]: DEBUG nova.compute.manager [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1391.056915] env[62522]: DEBUG oslo_vmware.api [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Waiting for the task: (returnval){ [ 1391.056915] env[62522]: value = "task-2416581" [ 1391.056915] env[62522]: _type = "Task" [ 1391.056915] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.057607] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c30885-fcf8-476e-840d-64a98aa09f70 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.070734] env[62522]: DEBUG oslo_vmware.api [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Task: {'id': task-2416581, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.570554] env[62522]: DEBUG oslo_vmware.api [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Task: {'id': task-2416581, 'name': PowerOnVM_Task, 'duration_secs': 0.501249} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.570756] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1391.570999] env[62522]: INFO nova.compute.manager [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Took 7.13 seconds to spawn the instance on the hypervisor. 
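The interleaved Task entries above (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) all follow the same wait-for-task pattern: each vCenter call returns a task handle, and the caller polls it, logging "progress is N%" until it reports "completed successfully" with a duration. Below is a minimal standalone sketch of that polling loop; the VCenterTask stub, its poll() method, and the poll interval are assumptions for illustration only and are not the oslo.vmware implementation.

    import time

    # Hypothetical stand-in for a vCenter task handle; real code would query
    # the Task managed object's info.state / info.progress via the vSphere API.
    class VCenterTask:
        def __init__(self, task_id, name, steps=3):
            self.task_id = task_id
            self.name = name
            self._steps = steps
            self._tick = 0

        def poll(self):
            """Return (state, progress) and advance the fake task one step."""
            self._tick += 1
            if self._tick >= self._steps:
                return "success", 100
            return "running", int(100 * self._tick / self._steps)

    def wait_for_task(task, poll_interval=0.5):
        """Poll a task until it completes, mirroring the 'progress is N%' /
        'completed successfully' entries in the log above."""
        start = time.monotonic()
        while True:
            state, progress = task.poll()
            if state == "running":
                print(f"Task: {{'id': {task.task_id}, 'name': {task.name}}} "
                      f"progress is {progress}%.")
                time.sleep(poll_interval)
                continue
            duration = time.monotonic() - start
            if state == "success":
                print(f"Task: {{'id': {task.task_id}, 'name': {task.name}, "
                      f"'duration_secs': {duration:.6f}}} completed successfully.")
                return
            raise RuntimeError(f"{task.name} ({task.task_id}) failed: {state}")

    if __name__ == "__main__":
        wait_for_task(VCenterTask("task-2416573", "CopyVirtualDisk_Task"))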
[ 1391.571201] env[62522]: DEBUG nova.compute.manager [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1391.576283] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8596a6a-8803-4437-9db2-fa15fc722fd5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.580987] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1391.581274] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1391.581453] env[62522]: DEBUG nova.objects.instance [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62522) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1392.098884] env[62522]: INFO nova.compute.manager [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Took 11.93 seconds to build instance. 
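The oslo_concurrency.lockutils entries in this section report, for each named lock, how long the caller waited to acquire it and how long it was held (for example "waited 1.571s" and "held 2.183s" around the compute_resources lock). The sketch below reproduces that acquire/release bookkeeping with the standard library only; the per-name lock registry and the exact log wording are illustrative assumptions, not the oslo.concurrency implementation.

    import threading
    import time
    from collections import defaultdict
    from contextlib import contextmanager

    # One threading.Lock per name, created on first use (illustrative only).
    _locks = defaultdict(threading.Lock)

    @contextmanager
    def named_lock(name, owner):
        """Acquire the lock called `name`, reporting wait and hold times in
        the same spirit as the lockutils DEBUG lines above."""
        waited_from = time.monotonic()
        lock = _locks[name]
        with lock:
            waited = time.monotonic() - waited_from
            print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
            held_from = time.monotonic()
            try:
                yield
            finally:
                held = time.monotonic() - held_from
                print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

    if __name__ == "__main__":
        with named_lock("compute_resources", "ResourceTracker.update_usage"):
            time.sleep(0.1)  # stand-in for the resource accounting work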
[ 1392.353064] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8acc304e-04e1-4203-b143-f5e8544ed302 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.372172] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Updating instance '75ba1afc-3586-4bb0-ae7f-ebf5a794f068' progress to 0 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1392.595963] env[62522]: DEBUG oslo_concurrency.lockutils [None req-e21328e6-367a-4825-82a7-9dc1fdd830f2 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1392.601429] env[62522]: DEBUG oslo_concurrency.lockutils [None req-318ca1ef-6967-41ff-8815-bb5cb56d9339 tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Lock "c464ae64-056f-4629-add9-2ff7a1971ebb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.441s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1392.682192] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Acquiring lock "c464ae64-056f-4629-add9-2ff7a1971ebb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1392.682453] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Lock "c464ae64-056f-4629-add9-2ff7a1971ebb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1392.682671] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Acquiring lock "c464ae64-056f-4629-add9-2ff7a1971ebb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1392.682854] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Lock "c464ae64-056f-4629-add9-2ff7a1971ebb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1392.683043] env[62522]: DEBUG oslo_concurrency.lockutils [None 
req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Lock "c464ae64-056f-4629-add9-2ff7a1971ebb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1392.685160] env[62522]: INFO nova.compute.manager [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Terminating instance [ 1392.878274] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1392.878594] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-43585517-4831-453e-ba0b-95f19872e70d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.885929] env[62522]: DEBUG oslo_vmware.api [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1392.885929] env[62522]: value = "task-2416582" [ 1392.885929] env[62522]: _type = "Task" [ 1392.885929] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.893984] env[62522]: DEBUG oslo_vmware.api [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416582, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.188989] env[62522]: DEBUG nova.compute.manager [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1393.189348] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1393.190133] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51bf5f38-5b37-4c58-bb57-3150adb79f35 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.197526] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1393.197764] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7564a7e6-abc3-438c-abec-7a53fe1dde55 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.203447] env[62522]: DEBUG oslo_vmware.api [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Waiting for the task: (returnval){ [ 1393.203447] env[62522]: value = "task-2416583" [ 1393.203447] env[62522]: _type = "Task" [ 1393.203447] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.210779] env[62522]: DEBUG oslo_vmware.api [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Task: {'id': task-2416583, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.396804] env[62522]: DEBUG oslo_concurrency.lockutils [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "d4f56d59-e03f-4eaf-aa2d-b77241e13be3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1393.396804] env[62522]: DEBUG oslo_concurrency.lockutils [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "d4f56d59-e03f-4eaf-aa2d-b77241e13be3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1393.401465] env[62522]: DEBUG oslo_vmware.api [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416582, 'name': PowerOffVM_Task, 'duration_secs': 0.207259} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.401757] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1393.401981] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Updating instance '75ba1afc-3586-4bb0-ae7f-ebf5a794f068' progress to 17 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1393.713993] env[62522]: DEBUG oslo_vmware.api [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Task: {'id': task-2416583, 'name': PowerOffVM_Task, 'duration_secs': 0.1949} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.715470] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1393.715470] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1393.715470] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-36255aef-acb7-42e0-87ea-d6e381ab8efd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.783698] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1393.783899] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1393.783899] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Deleting the datastore file [datastore1] c464ae64-056f-4629-add9-2ff7a1971ebb {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1393.784208] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-56111438-f7bd-407f-8414-a44145224016 
{{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.790185] env[62522]: DEBUG oslo_vmware.api [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Waiting for the task: (returnval){ [ 1393.790185] env[62522]: value = "task-2416585" [ 1393.790185] env[62522]: _type = "Task" [ 1393.790185] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.797615] env[62522]: DEBUG oslo_vmware.api [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Task: {'id': task-2416585, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.899389] env[62522]: DEBUG nova.compute.manager [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1393.908560] env[62522]: DEBUG nova.virt.hardware [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:04Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1393.908837] env[62522]: DEBUG nova.virt.hardware [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1393.909055] env[62522]: DEBUG nova.virt.hardware [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1393.909268] env[62522]: DEBUG nova.virt.hardware [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1393.909462] env[62522]: DEBUG nova.virt.hardware [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1393.909649] env[62522]: DEBUG nova.virt.hardware [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 
tempest-ServerActionsTestOtherA-43616242-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1393.909870] env[62522]: DEBUG nova.virt.hardware [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1393.910046] env[62522]: DEBUG nova.virt.hardware [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1393.910235] env[62522]: DEBUG nova.virt.hardware [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1393.910404] env[62522]: DEBUG nova.virt.hardware [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1393.910599] env[62522]: DEBUG nova.virt.hardware [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1393.916202] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3aa0aca0-c839-419e-8294-e3f6d1db4ab5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.932236] env[62522]: DEBUG oslo_vmware.api [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1393.932236] env[62522]: value = "task-2416586" [ 1393.932236] env[62522]: _type = "Task" [ 1393.932236] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.941916] env[62522]: DEBUG oslo_vmware.api [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416586, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.299987] env[62522]: DEBUG oslo_vmware.api [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Task: {'id': task-2416585, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162678} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.300376] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1394.300439] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1394.300622] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1394.300780] env[62522]: INFO nova.compute.manager [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1394.301032] env[62522]: DEBUG oslo.service.loopingcall [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1394.301230] env[62522]: DEBUG nova.compute.manager [-] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1394.301325] env[62522]: DEBUG nova.network.neutron [-] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1394.421473] env[62522]: DEBUG oslo_concurrency.lockutils [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.421737] env[62522]: DEBUG oslo_concurrency.lockutils [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.423347] env[62522]: INFO nova.compute.claims [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1394.441624] env[62522]: DEBUG oslo_vmware.api [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416586, 'name': ReconfigVM_Task, 'duration_secs': 0.151666} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.441914] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Updating instance '75ba1afc-3586-4bb0-ae7f-ebf5a794f068' progress to 33 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1394.577444] env[62522]: DEBUG nova.compute.manager [req-f205a780-e09f-473c-9770-5bf9446acce4 req-ff1d086d-3cbf-4927-8f9a-d632a14cc890 service nova] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Received event network-vif-deleted-69af92f4-f409-4098-b0e5-8490fc594da1 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1394.577682] env[62522]: INFO nova.compute.manager [req-f205a780-e09f-473c-9770-5bf9446acce4 req-ff1d086d-3cbf-4927-8f9a-d632a14cc890 service nova] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Neutron deleted interface 69af92f4-f409-4098-b0e5-8490fc594da1; detaching it from the instance and deleting it from the info cache [ 1394.577869] env[62522]: DEBUG nova.network.neutron [req-f205a780-e09f-473c-9770-5bf9446acce4 req-ff1d086d-3cbf-4927-8f9a-d632a14cc890 service nova] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1394.948045] env[62522]: DEBUG nova.virt.hardware [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1394.948303] env[62522]: DEBUG nova.virt.hardware [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1394.948461] env[62522]: DEBUG nova.virt.hardware [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1394.948640] env[62522]: DEBUG nova.virt.hardware [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1394.948783] env[62522]: DEBUG nova.virt.hardware [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Image pref 0:0:0 
{{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1394.948925] env[62522]: DEBUG nova.virt.hardware [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1394.949136] env[62522]: DEBUG nova.virt.hardware [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1394.949291] env[62522]: DEBUG nova.virt.hardware [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1394.949488] env[62522]: DEBUG nova.virt.hardware [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1394.949659] env[62522]: DEBUG nova.virt.hardware [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1394.949829] env[62522]: DEBUG nova.virt.hardware [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1394.955027] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Reconfiguring VM instance instance-00000075 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1394.955574] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84680df2-0c67-4a8a-88ca-2f9035a5d218 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.974598] env[62522]: DEBUG oslo_vmware.api [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1394.974598] env[62522]: value = "task-2416587" [ 1394.974598] env[62522]: _type = "Task" [ 1394.974598] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.981886] env[62522]: DEBUG oslo_vmware.api [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416587, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.058139] env[62522]: DEBUG nova.network.neutron [-] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1395.080179] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-679425ab-f010-4cb4-8dbf-3ca1327ac1a9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.089928] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91345d6f-fba6-40ce-a21f-eaad4fd12db3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.117018] env[62522]: DEBUG nova.compute.manager [req-f205a780-e09f-473c-9770-5bf9446acce4 req-ff1d086d-3cbf-4927-8f9a-d632a14cc890 service nova] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Detach interface failed, port_id=69af92f4-f409-4098-b0e5-8490fc594da1, reason: Instance c464ae64-056f-4629-add9-2ff7a1971ebb could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1395.486129] env[62522]: DEBUG oslo_vmware.api [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416587, 'name': ReconfigVM_Task, 'duration_secs': 0.150611} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.486448] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Reconfigured VM instance instance-00000075 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1395.487222] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0f4f0fc-eb1b-4f82-9155-bd0dc41ba2b2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.508223] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] volume-cb4b8a20-0671-45d5-bf05-a9de0e808c88/volume-cb4b8a20-0671-45d5-bf05-a9de0e808c88.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1395.510614] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e077242-f971-4fda-bc66-b16f4e30e424 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.528505] env[62522]: DEBUG oslo_vmware.api [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1395.528505] env[62522]: value = "task-2416588" [ 1395.528505] env[62522]: _type = "Task" [ 1395.528505] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.536959] env[62522]: DEBUG oslo_vmware.api [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416588, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.560936] env[62522]: INFO nova.compute.manager [-] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Took 1.26 seconds to deallocate network for instance. 
[ 1395.563666] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-424cd30f-9279-44db-a24c-991a55259236 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.572875] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f78b55b3-28dd-46bc-979d-520ad7d8763c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.604727] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a483e2-e2de-41e3-90be-4c7eea1724c6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.612176] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69a95641-8845-42d1-a83e-2ba7e6b09c92 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.625625] env[62522]: DEBUG nova.compute.provider_tree [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1396.038724] env[62522]: DEBUG oslo_vmware.api [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416588, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.070807] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.129073] env[62522]: DEBUG nova.scheduler.client.report [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1396.538798] env[62522]: DEBUG oslo_vmware.api [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416588, 'name': ReconfigVM_Task, 'duration_secs': 0.802356} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.539151] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Reconfigured VM instance instance-00000075 to attach disk [datastore1] volume-cb4b8a20-0671-45d5-bf05-a9de0e808c88/volume-cb4b8a20-0671-45d5-bf05-a9de0e808c88.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1396.539350] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Updating instance '75ba1afc-3586-4bb0-ae7f-ebf5a794f068' progress to 50 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1396.633482] env[62522]: DEBUG oslo_concurrency.lockutils [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.212s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.634045] env[62522]: DEBUG nova.compute.manager [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1396.636745] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.566s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.636975] env[62522]: DEBUG nova.objects.instance [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Lazy-loading 'resources' on Instance uuid c464ae64-056f-4629-add9-2ff7a1971ebb {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1397.046339] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cd9c76d-3e8a-46ce-8d11-eff0aaa0a262 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.066590] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdce2f40-367a-4714-b01f-960b57ddc382 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.083246] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Updating instance '75ba1afc-3586-4bb0-ae7f-ebf5a794f068' progress to 67 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1397.139551] env[62522]: DEBUG nova.compute.utils [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1397.140825] env[62522]: DEBUG nova.compute.manager [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Allocating IP information in the background. 
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1397.140983] env[62522]: DEBUG nova.network.neutron [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1397.181604] env[62522]: DEBUG nova.policy [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'edec975faaef4f2ba31aa0de30590522', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fa792663b4ac41b7bf4c5e4b290f9b86', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1397.236721] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-826535b1-6876-4893-a814-b7b39c4a5991 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.244260] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91f72d49-7122-44c8-882a-367716544436 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.273743] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd71e8d-4fb5-41fb-b5a5-e4e773f86fb1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.280486] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c831744-87df-4540-bc6e-ca66f8dafad7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.294606] env[62522]: DEBUG nova.compute.provider_tree [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1397.438882] env[62522]: DEBUG nova.network.neutron [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Successfully created port: 2e6d7577-6482-4d9e-8729-5b99aa379e44 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1397.646768] env[62522]: DEBUG nova.compute.manager [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1397.798055] env[62522]: DEBUG nova.scheduler.client.report [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1398.303416] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.666s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1398.326998] env[62522]: INFO nova.scheduler.client.report [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Deleted allocations for instance c464ae64-056f-4629-add9-2ff7a1971ebb [ 1398.658063] env[62522]: DEBUG nova.compute.manager [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1398.685340] env[62522]: DEBUG nova.virt.hardware [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1398.685677] env[62522]: DEBUG nova.virt.hardware [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1398.685861] env[62522]: DEBUG nova.virt.hardware [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1398.686146] env[62522]: DEBUG nova.virt.hardware [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1398.686359] env[62522]: DEBUG nova.virt.hardware [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1398.686561] env[62522]: DEBUG nova.virt.hardware [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1398.686886] env[62522]: DEBUG nova.virt.hardware [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1398.687009] env[62522]: DEBUG nova.virt.hardware [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1398.687225] 
env[62522]: DEBUG nova.virt.hardware [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1398.687443] env[62522]: DEBUG nova.virt.hardware [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1398.687695] env[62522]: DEBUG nova.virt.hardware [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1398.688624] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c40ff6c-39cd-49d1-b44b-1032af10d941 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.699271] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e43b0a10-5710-4691-9b91-1b2033d962bc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.734577] env[62522]: DEBUG nova.network.neutron [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Port 5a9957c5-133c-4c1b-8469-dbd152ccd386 binding to destination host cpu-1 is already ACTIVE {{(pid=62522) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1398.834389] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ccb3acce-18de-4d6d-8d4d-9da5c290e48f tempest-ServerAddressesTestJSON-1100766790 tempest-ServerAddressesTestJSON-1100766790-project-member] Lock "c464ae64-056f-4629-add9-2ff7a1971ebb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.152s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1398.887287] env[62522]: DEBUG nova.compute.manager [req-426a8926-0737-4fe2-8182-f80e57b96327 req-8c1511f1-3c64-4cbd-830a-a38db5fb069a service nova] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Received event network-vif-plugged-2e6d7577-6482-4d9e-8729-5b99aa379e44 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1398.887587] env[62522]: DEBUG oslo_concurrency.lockutils [req-426a8926-0737-4fe2-8182-f80e57b96327 req-8c1511f1-3c64-4cbd-830a-a38db5fb069a service nova] Acquiring lock "d4f56d59-e03f-4eaf-aa2d-b77241e13be3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.887750] env[62522]: DEBUG oslo_concurrency.lockutils [req-426a8926-0737-4fe2-8182-f80e57b96327 req-8c1511f1-3c64-4cbd-830a-a38db5fb069a service nova] Lock "d4f56d59-e03f-4eaf-aa2d-b77241e13be3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.887844] env[62522]: DEBUG oslo_concurrency.lockutils [req-426a8926-0737-4fe2-8182-f80e57b96327 req-8c1511f1-3c64-4cbd-830a-a38db5fb069a service nova] Lock "d4f56d59-e03f-4eaf-aa2d-b77241e13be3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1398.888018] env[62522]: DEBUG nova.compute.manager [req-426a8926-0737-4fe2-8182-f80e57b96327 req-8c1511f1-3c64-4cbd-830a-a38db5fb069a service nova] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] No waiting events found dispatching network-vif-plugged-2e6d7577-6482-4d9e-8729-5b99aa379e44 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1398.888190] env[62522]: WARNING nova.compute.manager [req-426a8926-0737-4fe2-8182-f80e57b96327 req-8c1511f1-3c64-4cbd-830a-a38db5fb069a service nova] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Received unexpected event network-vif-plugged-2e6d7577-6482-4d9e-8729-5b99aa379e44 for instance with vm_state building and task_state spawning. [ 1398.977602] env[62522]: DEBUG nova.network.neutron [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Successfully updated port: 2e6d7577-6482-4d9e-8729-5b99aa379e44 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1399.482573] env[62522]: DEBUG oslo_concurrency.lockutils [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "refresh_cache-d4f56d59-e03f-4eaf-aa2d-b77241e13be3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1399.482573] env[62522]: DEBUG oslo_concurrency.lockutils [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquired lock "refresh_cache-d4f56d59-e03f-4eaf-aa2d-b77241e13be3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.483927] env[62522]: DEBUG nova.network.neutron [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1399.767177] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "75ba1afc-3586-4bb0-ae7f-ebf5a794f068-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1399.767508] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "75ba1afc-3586-4bb0-ae7f-ebf5a794f068-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
:: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1399.767598] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "75ba1afc-3586-4bb0-ae7f-ebf5a794f068-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1400.015630] env[62522]: DEBUG nova.network.neutron [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1400.158247] env[62522]: DEBUG nova.network.neutron [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Updating instance_info_cache with network_info: [{"id": "2e6d7577-6482-4d9e-8729-5b99aa379e44", "address": "fa:16:3e:e9:d8:b5", "network": {"id": "2c9c537f-91b6-4217-8eaf-dc187f4ce7d5", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1154766161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fa792663b4ac41b7bf4c5e4b290f9b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e6d7577-64", "ovs_interfaceid": "2e6d7577-6482-4d9e-8729-5b99aa379e44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1400.661444] env[62522]: DEBUG oslo_concurrency.lockutils [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Releasing lock "refresh_cache-d4f56d59-e03f-4eaf-aa2d-b77241e13be3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1400.661838] env[62522]: DEBUG nova.compute.manager [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Instance network_info: |[{"id": "2e6d7577-6482-4d9e-8729-5b99aa379e44", "address": "fa:16:3e:e9:d8:b5", "network": {"id": "2c9c537f-91b6-4217-8eaf-dc187f4ce7d5", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1154766161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fa792663b4ac41b7bf4c5e4b290f9b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e6d7577-64", "ovs_interfaceid": "2e6d7577-6482-4d9e-8729-5b99aa379e44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1400.662261] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:d8:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2e6d7577-6482-4d9e-8729-5b99aa379e44', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1400.669931] env[62522]: DEBUG oslo.service.loopingcall [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1400.670178] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1400.670413] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1e64fee6-0a62-4c38-8a38-c7bb57ece7bc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.691136] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1400.691136] env[62522]: value = "task-2416589" [ 1400.691136] env[62522]: _type = "Task" [ 1400.691136] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.698960] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416589, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.807134] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "refresh_cache-75ba1afc-3586-4bb0-ae7f-ebf5a794f068" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1400.807432] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired lock "refresh_cache-75ba1afc-3586-4bb0-ae7f-ebf5a794f068" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1400.807507] env[62522]: DEBUG nova.network.neutron [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1400.916557] env[62522]: DEBUG nova.compute.manager [req-5077f5d4-0e3d-4bbe-ac6a-24a03a56bb18 req-89c585b0-4649-4439-bced-72dcf031861a service nova] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Received event network-changed-2e6d7577-6482-4d9e-8729-5b99aa379e44 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1400.916896] env[62522]: DEBUG nova.compute.manager [req-5077f5d4-0e3d-4bbe-ac6a-24a03a56bb18 req-89c585b0-4649-4439-bced-72dcf031861a service nova] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Refreshing instance network info cache due to event network-changed-2e6d7577-6482-4d9e-8729-5b99aa379e44. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1400.917223] env[62522]: DEBUG oslo_concurrency.lockutils [req-5077f5d4-0e3d-4bbe-ac6a-24a03a56bb18 req-89c585b0-4649-4439-bced-72dcf031861a service nova] Acquiring lock "refresh_cache-d4f56d59-e03f-4eaf-aa2d-b77241e13be3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1400.917429] env[62522]: DEBUG oslo_concurrency.lockutils [req-5077f5d4-0e3d-4bbe-ac6a-24a03a56bb18 req-89c585b0-4649-4439-bced-72dcf031861a service nova] Acquired lock "refresh_cache-d4f56d59-e03f-4eaf-aa2d-b77241e13be3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1400.917688] env[62522]: DEBUG nova.network.neutron [req-5077f5d4-0e3d-4bbe-ac6a-24a03a56bb18 req-89c585b0-4649-4439-bced-72dcf031861a service nova] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Refreshing network info cache for port 2e6d7577-6482-4d9e-8729-5b99aa379e44 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1401.203010] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416589, 'name': CreateVM_Task, 'duration_secs': 0.350922} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.203294] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1401.204256] env[62522]: DEBUG oslo_concurrency.lockutils [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1401.204526] env[62522]: DEBUG oslo_concurrency.lockutils [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1401.204969] env[62522]: DEBUG oslo_concurrency.lockutils [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1401.205318] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c572f35-daa2-43a4-a520-e59e219a8932 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.211463] env[62522]: DEBUG oslo_vmware.api [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1401.211463] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521a822d-25fc-1993-1d2f-683cd391ab80" [ 1401.211463] env[62522]: _type = "Task" [ 1401.211463] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.220958] env[62522]: DEBUG oslo_vmware.api [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521a822d-25fc-1993-1d2f-683cd391ab80, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.692925] env[62522]: DEBUG nova.network.neutron [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Updating instance_info_cache with network_info: [{"id": "5a9957c5-133c-4c1b-8469-dbd152ccd386", "address": "fa:16:3e:52:7c:d9", "network": {"id": "fd993a8b-571c-4873-a5d6-4d8c60e23d38", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2084093882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071dd4c295a54e388099d5bf0f4e300b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a9957c5-13", "ovs_interfaceid": "5a9957c5-133c-4c1b-8469-dbd152ccd386", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1401.722410] env[62522]: DEBUG oslo_vmware.api [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521a822d-25fc-1993-1d2f-683cd391ab80, 'name': SearchDatastore_Task, 'duration_secs': 0.010553} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.722703] env[62522]: DEBUG oslo_concurrency.lockutils [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1401.722935] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1401.723180] env[62522]: DEBUG oslo_concurrency.lockutils [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1401.723373] env[62522]: DEBUG oslo_concurrency.lockutils [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1401.723518] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1401.723758] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c4804b4-9749-4352-a1a1-d6983009c35d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.731773] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1401.731773] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1401.732450] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96db3ad1-3cf3-437b-97ab-7945b72d7863 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.739135] env[62522]: DEBUG oslo_vmware.api [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1401.739135] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52aa27cf-daf1-d53e-b843-2f7cc664a6da" [ 1401.739135] env[62522]: _type = "Task" [ 1401.739135] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.743711] env[62522]: DEBUG nova.network.neutron [req-5077f5d4-0e3d-4bbe-ac6a-24a03a56bb18 req-89c585b0-4649-4439-bced-72dcf031861a service nova] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Updated VIF entry in instance network info cache for port 2e6d7577-6482-4d9e-8729-5b99aa379e44. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1401.744069] env[62522]: DEBUG nova.network.neutron [req-5077f5d4-0e3d-4bbe-ac6a-24a03a56bb18 req-89c585b0-4649-4439-bced-72dcf031861a service nova] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Updating instance_info_cache with network_info: [{"id": "2e6d7577-6482-4d9e-8729-5b99aa379e44", "address": "fa:16:3e:e9:d8:b5", "network": {"id": "2c9c537f-91b6-4217-8eaf-dc187f4ce7d5", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1154766161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fa792663b4ac41b7bf4c5e4b290f9b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e6d7577-64", "ovs_interfaceid": "2e6d7577-6482-4d9e-8729-5b99aa379e44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1401.748173] env[62522]: DEBUG oslo_vmware.api [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52aa27cf-daf1-d53e-b843-2f7cc664a6da, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.195859] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Releasing lock "refresh_cache-75ba1afc-3586-4bb0-ae7f-ebf5a794f068" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1402.250824] env[62522]: DEBUG oslo_concurrency.lockutils [req-5077f5d4-0e3d-4bbe-ac6a-24a03a56bb18 req-89c585b0-4649-4439-bced-72dcf031861a service nova] Releasing lock "refresh_cache-d4f56d59-e03f-4eaf-aa2d-b77241e13be3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1402.251180] env[62522]: DEBUG oslo_vmware.api [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52aa27cf-daf1-d53e-b843-2f7cc664a6da, 'name': SearchDatastore_Task, 'duration_secs': 0.008475} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.251905] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d7bbfe2-bbe2-4303-8798-124587bfc9fd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.256834] env[62522]: DEBUG oslo_vmware.api [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1402.256834] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529805d2-5f04-8446-ae57-dd1285f18009" [ 1402.256834] env[62522]: _type = "Task" [ 1402.256834] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.264020] env[62522]: DEBUG oslo_vmware.api [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529805d2-5f04-8446-ae57-dd1285f18009, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.705198] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87003ed0-817b-4bcf-b1b0-c11bdbd60bdc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.712412] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecc6f091-690c-47f3-a399-4bb5aa244d85 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.767187] env[62522]: DEBUG oslo_vmware.api [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]529805d2-5f04-8446-ae57-dd1285f18009, 'name': SearchDatastore_Task, 'duration_secs': 0.009729} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.767447] env[62522]: DEBUG oslo_concurrency.lockutils [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1402.767724] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] d4f56d59-e03f-4eaf-aa2d-b77241e13be3/d4f56d59-e03f-4eaf-aa2d-b77241e13be3.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1402.768011] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f5a84c10-0922-4c39-b010-6afdbba2f3bc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.776112] env[62522]: DEBUG oslo_vmware.api [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1402.776112] env[62522]: value = "task-2416590" [ 1402.776112] env[62522]: _type = "Task" [ 1402.776112] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.783786] env[62522]: DEBUG oslo_vmware.api [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416590, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.285770] env[62522]: DEBUG oslo_vmware.api [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416590, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.430123} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.286052] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] d4f56d59-e03f-4eaf-aa2d-b77241e13be3/d4f56d59-e03f-4eaf-aa2d-b77241e13be3.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1403.286253] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1403.286489] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c61b46af-2069-4551-8584-959af729081a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.293420] env[62522]: DEBUG oslo_vmware.api [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1403.293420] env[62522]: value = "task-2416591" [ 1403.293420] env[62522]: _type = "Task" [ 1403.293420] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.300447] env[62522]: DEBUG oslo_vmware.api [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416591, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.804565] env[62522]: DEBUG oslo_vmware.api [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416591, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079487} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.804565] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1403.805652] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e3df13-1720-4c40-96ae-4408c29a393d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.810375] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46126fe-f2ca-4b60-a274-6edf8f9a057a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.830901] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Reconfiguring VM instance instance-00000077 to attach disk [datastore2] d4f56d59-e03f-4eaf-aa2d-b77241e13be3/d4f56d59-e03f-4eaf-aa2d-b77241e13be3.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1403.831450] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5cebd725-2601-43ed-8691-e25b73c7f4f3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.857966] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0380927e-a727-4902-ae02-7514b3f2ff28 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.861933] env[62522]: DEBUG oslo_vmware.api [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1403.861933] env[62522]: value = "task-2416592" [ 1403.861933] env[62522]: _type = "Task" [ 1403.861933] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.864135] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Updating instance '75ba1afc-3586-4bb0-ae7f-ebf5a794f068' progress to 83 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1403.874418] env[62522]: DEBUG oslo_vmware.api [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416592, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.372285] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1404.375465] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d802ba96-e329-4f6f-85e4-ce280f8542fa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.377077] env[62522]: DEBUG oslo_vmware.api [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416592, 'name': ReconfigVM_Task, 'duration_secs': 0.270182} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.377341] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Reconfigured VM instance instance-00000077 to attach disk [datastore2] d4f56d59-e03f-4eaf-aa2d-b77241e13be3/d4f56d59-e03f-4eaf-aa2d-b77241e13be3.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1404.378244] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d41b358c-4bea-4337-b511-c33e362e894d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.383655] env[62522]: DEBUG oslo_vmware.api [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1404.383655] env[62522]: value = "task-2416593" [ 1404.383655] env[62522]: _type = "Task" [ 1404.383655] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.384741] env[62522]: DEBUG oslo_vmware.api [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1404.384741] env[62522]: value = "task-2416594" [ 1404.384741] env[62522]: _type = "Task" [ 1404.384741] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.394635] env[62522]: DEBUG oslo_vmware.api [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416594, 'name': Rename_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.397291] env[62522]: DEBUG oslo_vmware.api [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416593, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.896901] env[62522]: DEBUG oslo_vmware.api [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416593, 'name': PowerOnVM_Task, 'duration_secs': 0.374926} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.899772] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1404.899971] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-1b6ee850-d9e8-4fc0-8f81-863b19ab6594 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Updating instance '75ba1afc-3586-4bb0-ae7f-ebf5a794f068' progress to 100 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1404.903478] env[62522]: DEBUG oslo_vmware.api [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416594, 'name': Rename_Task, 'duration_secs': 0.207896} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.903904] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1404.904151] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a4dfbc93-3f44-40b1-a20a-907a8ad3c033 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.910501] env[62522]: DEBUG oslo_vmware.api [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1404.910501] env[62522]: value = "task-2416595" [ 1404.910501] env[62522]: _type = "Task" [ 1404.910501] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.917522] env[62522]: DEBUG oslo_vmware.api [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416595, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.419950] env[62522]: DEBUG oslo_vmware.api [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416595, 'name': PowerOnVM_Task, 'duration_secs': 0.464503} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.420242] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1405.420433] env[62522]: INFO nova.compute.manager [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Took 6.76 seconds to spawn the instance on the hypervisor. [ 1405.420612] env[62522]: DEBUG nova.compute.manager [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1405.421610] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d02004-1ecf-4247-8285-61d9a995677d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.940751] env[62522]: INFO nova.compute.manager [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Took 11.54 seconds to build instance. [ 1406.449669] env[62522]: DEBUG oslo_concurrency.lockutils [None req-927e0298-23bb-46b0-9614-293a7debf450 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "d4f56d59-e03f-4eaf-aa2d-b77241e13be3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.053s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1406.453690] env[62522]: DEBUG nova.compute.manager [req-a90b9ba6-4f06-42fc-829e-50ddd1bd305e req-f49bb8de-0db6-4a01-87fd-519434df8063 service nova] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Received event network-changed-2e6d7577-6482-4d9e-8729-5b99aa379e44 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1406.453880] env[62522]: DEBUG nova.compute.manager [req-a90b9ba6-4f06-42fc-829e-50ddd1bd305e req-f49bb8de-0db6-4a01-87fd-519434df8063 service nova] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Refreshing instance network info cache due to event network-changed-2e6d7577-6482-4d9e-8729-5b99aa379e44. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1406.454323] env[62522]: DEBUG oslo_concurrency.lockutils [req-a90b9ba6-4f06-42fc-829e-50ddd1bd305e req-f49bb8de-0db6-4a01-87fd-519434df8063 service nova] Acquiring lock "refresh_cache-d4f56d59-e03f-4eaf-aa2d-b77241e13be3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1406.454475] env[62522]: DEBUG oslo_concurrency.lockutils [req-a90b9ba6-4f06-42fc-829e-50ddd1bd305e req-f49bb8de-0db6-4a01-87fd-519434df8063 service nova] Acquired lock "refresh_cache-d4f56d59-e03f-4eaf-aa2d-b77241e13be3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1406.454641] env[62522]: DEBUG nova.network.neutron [req-a90b9ba6-4f06-42fc-829e-50ddd1bd305e req-f49bb8de-0db6-4a01-87fd-519434df8063 service nova] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Refreshing network info cache for port 2e6d7577-6482-4d9e-8729-5b99aa379e44 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1407.065604] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3f363537-bd28-436b-a3fd-3eeb0d7c227d tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "75ba1afc-3586-4bb0-ae7f-ebf5a794f068" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1407.065866] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3f363537-bd28-436b-a3fd-3eeb0d7c227d tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "75ba1afc-3586-4bb0-ae7f-ebf5a794f068" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1407.066217] env[62522]: DEBUG nova.compute.manager [None req-3f363537-bd28-436b-a3fd-3eeb0d7c227d tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Going to confirm migration 8 {{(pid=62522) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1407.200983] env[62522]: DEBUG nova.network.neutron [req-a90b9ba6-4f06-42fc-829e-50ddd1bd305e req-f49bb8de-0db6-4a01-87fd-519434df8063 service nova] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Updated VIF entry in instance network info cache for port 2e6d7577-6482-4d9e-8729-5b99aa379e44. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1407.201388] env[62522]: DEBUG nova.network.neutron [req-a90b9ba6-4f06-42fc-829e-50ddd1bd305e req-f49bb8de-0db6-4a01-87fd-519434df8063 service nova] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Updating instance_info_cache with network_info: [{"id": "2e6d7577-6482-4d9e-8729-5b99aa379e44", "address": "fa:16:3e:e9:d8:b5", "network": {"id": "2c9c537f-91b6-4217-8eaf-dc187f4ce7d5", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1154766161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fa792663b4ac41b7bf4c5e4b290f9b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e6d7577-64", "ovs_interfaceid": "2e6d7577-6482-4d9e-8729-5b99aa379e44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1407.625766] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3f363537-bd28-436b-a3fd-3eeb0d7c227d tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "refresh_cache-75ba1afc-3586-4bb0-ae7f-ebf5a794f068" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1407.626098] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3f363537-bd28-436b-a3fd-3eeb0d7c227d tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquired lock "refresh_cache-75ba1afc-3586-4bb0-ae7f-ebf5a794f068" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1407.626151] env[62522]: DEBUG nova.network.neutron [None req-3f363537-bd28-436b-a3fd-3eeb0d7c227d tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1407.626316] env[62522]: DEBUG nova.objects.instance [None req-3f363537-bd28-436b-a3fd-3eeb0d7c227d tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lazy-loading 'info_cache' on Instance uuid 75ba1afc-3586-4bb0-ae7f-ebf5a794f068 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1407.704178] env[62522]: DEBUG oslo_concurrency.lockutils [req-a90b9ba6-4f06-42fc-829e-50ddd1bd305e req-f49bb8de-0db6-4a01-87fd-519434df8063 service nova] Releasing lock "refresh_cache-d4f56d59-e03f-4eaf-aa2d-b77241e13be3" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1408.879437] env[62522]: DEBUG 
nova.network.neutron [None req-3f363537-bd28-436b-a3fd-3eeb0d7c227d tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Updating instance_info_cache with network_info: [{"id": "5a9957c5-133c-4c1b-8469-dbd152ccd386", "address": "fa:16:3e:52:7c:d9", "network": {"id": "fd993a8b-571c-4873-a5d6-4d8c60e23d38", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2084093882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071dd4c295a54e388099d5bf0f4e300b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a9957c5-13", "ovs_interfaceid": "5a9957c5-133c-4c1b-8469-dbd152ccd386", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1409.381910] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3f363537-bd28-436b-a3fd-3eeb0d7c227d tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Releasing lock "refresh_cache-75ba1afc-3586-4bb0-ae7f-ebf5a794f068" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1409.382211] env[62522]: DEBUG nova.objects.instance [None req-3f363537-bd28-436b-a3fd-3eeb0d7c227d tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lazy-loading 'migration_context' on Instance uuid 75ba1afc-3586-4bb0-ae7f-ebf5a794f068 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1409.885388] env[62522]: DEBUG nova.objects.base [None req-3f363537-bd28-436b-a3fd-3eeb0d7c227d tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Object Instance<75ba1afc-3586-4bb0-ae7f-ebf5a794f068> lazy-loaded attributes: info_cache,migration_context {{(pid=62522) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1409.886396] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e15fc7e0-ab6d-4325-9969-8871d31c6f8f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.905880] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfbef4a1-49da-406b-b258-196340e7d8e1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.911238] env[62522]: DEBUG oslo_vmware.api [None req-3f363537-bd28-436b-a3fd-3eeb0d7c227d tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1409.911238] env[62522]: value = 
"session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522013ed-ac23-6edb-288f-cab1ad0d5d42" [ 1409.911238] env[62522]: _type = "Task" [ 1409.911238] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.919711] env[62522]: DEBUG oslo_vmware.api [None req-3f363537-bd28-436b-a3fd-3eeb0d7c227d tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522013ed-ac23-6edb-288f-cab1ad0d5d42, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.421756] env[62522]: DEBUG oslo_vmware.api [None req-3f363537-bd28-436b-a3fd-3eeb0d7c227d tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]522013ed-ac23-6edb-288f-cab1ad0d5d42, 'name': SearchDatastore_Task, 'duration_secs': 0.008381} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.421999] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3f363537-bd28-436b-a3fd-3eeb0d7c227d tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.422242] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3f363537-bd28-436b-a3fd-3eeb0d7c227d tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1411.022883] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3495ca9b-0c43-4a86-ad5d-646684814d21 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.033861] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67f31714-2315-49e1-92ee-2f122a5c9295 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.083403] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6904c705-d9a3-463d-9fff-579a6913aa14 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.093682] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c20ffd60-7902-4259-830e-9050365a26b5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.110506] env[62522]: DEBUG nova.compute.provider_tree [None req-3f363537-bd28-436b-a3fd-3eeb0d7c227d tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1411.613580] env[62522]: DEBUG 
nova.scheduler.client.report [None req-3f363537-bd28-436b-a3fd-3eeb0d7c227d tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1412.625062] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3f363537-bd28-436b-a3fd-3eeb0d7c227d tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.202s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.993032] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.993326] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1413.180570] env[62522]: INFO nova.scheduler.client.report [None req-3f363537-bd28-436b-a3fd-3eeb0d7c227d tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Deleted allocation for migration 2d33b26d-c265-410c-aac8-0ac47c5bd0e7 [ 1413.496901] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.497134] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.497456] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.497456] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62522) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1413.498344] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23c66f1f-2cfe-4ea2-ac21-7ea3d76409e0 
{{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.506654] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f56be02-604d-44d3-a5fe-6601d473859f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.520068] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f6acbc-f5f2-4093-942d-59a60744c30b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.526561] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19fe433c-6de2-4860-afec-717cdf39770b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.555157] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180420MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=62522) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1413.555302] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.555494] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.559432] env[62522]: INFO nova.compute.manager [None req-21375f60-a3cd-4f2a-9305-19d0dab63895 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Get console output [ 1413.559771] env[62522]: WARNING nova.virt.vmwareapi.driver [None req-21375f60-a3cd-4f2a-9305-19d0dab63895 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] The console log is missing. Check your VSPC configuration [ 1413.687053] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3f363537-bd28-436b-a3fd-3eeb0d7c227d tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "75ba1afc-3586-4bb0-ae7f-ebf5a794f068" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.621s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.583249] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance b31195c2-29f4-475c-baa7-fcb4791b7278 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1414.583511] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 27f4b976-7dff-49b0-9b00-7515cb976e72 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1414.583723] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance fd9af7c3-358e-417f-97f4-fd2d67d21300 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1414.583910] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance c95f697b-0d68-489d-bfc4-9d129eab1be2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1414.584109] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 75ba1afc-3586-4bb0-ae7f-ebf5a794f068 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1414.584304] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance d4f56d59-e03f-4eaf-aa2d-b77241e13be3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1414.584563] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1414.584782] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1728MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1414.663682] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f41c183f-628f-4be5-9230-39b7015278e2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.671775] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6bad9a-d0dd-4651-8afc-8cc1a6672cb1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.701228] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e890fbe6-d395-4217-bcae-a8358f63c84b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.707956] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1e76e65-dcc1-49b0-8550-01a4ad3e2451 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.720659] env[62522]: DEBUG nova.compute.provider_tree [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1415.224359] env[62522]: DEBUG nova.scheduler.client.report [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1415.729331] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62522) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1415.729724] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.174s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1416.979824] env[62522]: DEBUG oslo_service.periodic_task [None 
req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1416.980266] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1417.486124] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1417.486397] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Starting heal instance info cache {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1418.018305] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1418.018584] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquired lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1418.018584] env[62522]: DEBUG nova.network.neutron [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Forcefully refreshing network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1419.237335] env[62522]: DEBUG nova.network.neutron [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Updating instance_info_cache with network_info: [{"id": "cf4b3978-2fa2-4182-9422-abf29faafcf6", "address": "fa:16:3e:74:26:e7", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf4b3978-2f", "ovs_interfaceid": "cf4b3978-2fa2-4182-9422-abf29faafcf6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1419.740639] env[62522]: DEBUG 
oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Releasing lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1419.740838] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Updated the network info_cache for instance {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 1419.741068] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1419.741233] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1419.741382] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1419.741530] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1419.741680] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1419.741808] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62522) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1423.034111] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "fd9af7c3-358e-417f-97f4-fd2d67d21300" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1423.034506] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "fd9af7c3-358e-417f-97f4-fd2d67d21300" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1423.034658] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "fd9af7c3-358e-417f-97f4-fd2d67d21300-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1423.034822] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "fd9af7c3-358e-417f-97f4-fd2d67d21300-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1423.035013] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "fd9af7c3-358e-417f-97f4-fd2d67d21300-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1423.036993] env[62522]: INFO nova.compute.manager [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Terminating instance [ 1423.541331] env[62522]: DEBUG nova.compute.manager [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1423.541567] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1423.542527] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a36ba4-dc8d-4c7d-8c82-b4da5de4ad46 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.550609] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1423.550840] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-85229e75-4a7e-4342-ad94-cd9bfa16ad2e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.558735] env[62522]: DEBUG oslo_vmware.api [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1423.558735] env[62522]: value = "task-2416596" [ 1423.558735] env[62522]: _type = "Task" [ 1423.558735] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.566109] env[62522]: DEBUG oslo_vmware.api [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416596, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.069030] env[62522]: DEBUG oslo_vmware.api [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416596, 'name': PowerOffVM_Task, 'duration_secs': 0.177657} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.069408] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1424.069507] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1424.069802] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-53302cfa-2242-49dd-bde2-8224d6d1b61c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.132674] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1424.132896] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1424.133084] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Deleting the datastore file [datastore2] fd9af7c3-358e-417f-97f4-fd2d67d21300 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1424.133342] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d5cde61e-da77-42af-b063-2a04b682c345 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.140736] env[62522]: DEBUG oslo_vmware.api [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1424.140736] env[62522]: value = "task-2416598" [ 1424.140736] env[62522]: _type = "Task" [ 1424.140736] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.148492] env[62522]: DEBUG oslo_vmware.api [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416598, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.651843] env[62522]: DEBUG oslo_vmware.api [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416598, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142078} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.652066] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1424.652227] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1424.652408] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1424.652584] env[62522]: INFO nova.compute.manager [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1424.652833] env[62522]: DEBUG oslo.service.loopingcall [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1424.653038] env[62522]: DEBUG nova.compute.manager [-] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1424.653143] env[62522]: DEBUG nova.network.neutron [-] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1425.101355] env[62522]: DEBUG nova.compute.manager [req-c91c6372-8665-4880-9dd2-992b0b5a528c req-7f320515-7af2-4b96-bf37-251e5f355082 service nova] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Received event network-vif-deleted-2c6f1904-1976-45eb-9380-5262c08450d2 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1425.101649] env[62522]: INFO nova.compute.manager [req-c91c6372-8665-4880-9dd2-992b0b5a528c req-7f320515-7af2-4b96-bf37-251e5f355082 service nova] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Neutron deleted interface 2c6f1904-1976-45eb-9380-5262c08450d2; detaching it from the instance and deleting it from the info cache [ 1425.101773] env[62522]: DEBUG nova.network.neutron [req-c91c6372-8665-4880-9dd2-992b0b5a528c req-7f320515-7af2-4b96-bf37-251e5f355082 service nova] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.583526] env[62522]: DEBUG nova.network.neutron [-] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.604296] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-67365198-c2a6-42fd-b416-a9a1b74737db {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.614046] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb0d980-5bcb-4991-aed4-83e38090e92f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.640651] env[62522]: DEBUG nova.compute.manager [req-c91c6372-8665-4880-9dd2-992b0b5a528c req-7f320515-7af2-4b96-bf37-251e5f355082 service nova] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Detach interface failed, port_id=2c6f1904-1976-45eb-9380-5262c08450d2, reason: Instance fd9af7c3-358e-417f-97f4-fd2d67d21300 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1426.086056] env[62522]: INFO nova.compute.manager [-] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Took 1.43 seconds to deallocate network for instance. 
[ 1426.592267] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.592605] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.592763] env[62522]: DEBUG nova.objects.instance [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lazy-loading 'resources' on Instance uuid fd9af7c3-358e-417f-97f4-fd2d67d21300 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1427.175490] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14219ef3-8a59-4a20-ab0b-0cb3317dc4bf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.182738] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a929a61-9390-4041-98fa-b2c345390f36 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.213851] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57398c93-a6f7-4612-b78a-5edeec6a99fa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.221480] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-906a5882-6cfc-4343-81d9-4b8fc914870a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.234145] env[62522]: DEBUG nova.compute.provider_tree [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1427.737907] env[62522]: DEBUG nova.scheduler.client.report [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1428.242725] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 
tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.650s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1428.262346] env[62522]: INFO nova.scheduler.client.report [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Deleted allocations for instance fd9af7c3-358e-417f-97f4-fd2d67d21300 [ 1428.770408] env[62522]: DEBUG oslo_concurrency.lockutils [None req-b8a2421c-43df-4cb1-8a2f-8deeb8337584 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "fd9af7c3-358e-417f-97f4-fd2d67d21300" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.736s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1431.016429] env[62522]: DEBUG nova.compute.manager [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Stashing vm_state: active {{(pid=62522) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1431.532649] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1431.532908] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1432.038227] env[62522]: INFO nova.compute.claims [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1432.544666] env[62522]: INFO nova.compute.resource_tracker [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Updating resource usage from migration cf085b26-49a3-4351-ae36-7464fdb8967b [ 1432.620309] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-800f6b95-c521-4c61-b81d-49e87c9864b3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.630325] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-278034ac-5f2e-4d51-a841-5f4cb883b599 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.662701] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-cf223196-1183-49d0-9f33-cc5a3084165f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.669572] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ef580b-e501-47b2-8f27-9767c246d5d3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.682212] env[62522]: DEBUG nova.compute.provider_tree [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1433.185146] env[62522]: DEBUG nova.scheduler.client.report [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1433.691043] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.158s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.691169] env[62522]: INFO nova.compute.manager [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Migrating [ 1434.205635] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1434.205961] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1434.206048] env[62522]: DEBUG nova.network.neutron [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1434.918722] env[62522]: DEBUG nova.network.neutron [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 
tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Updating instance_info_cache with network_info: [{"id": "cf4b3978-2fa2-4182-9422-abf29faafcf6", "address": "fa:16:3e:74:26:e7", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf4b3978-2f", "ovs_interfaceid": "cf4b3978-2fa2-4182-9422-abf29faafcf6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1435.421308] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Releasing lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1436.935802] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c79aded6-d131-456b-95fc-61d555217a5c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.954731] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Updating instance '27f4b976-7dff-49b0-9b00-7515cb976e72' progress to 0 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1437.461052] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1437.461322] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5a8189ea-4f83-4827-8b3b-dee3688c7d5d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.470198] env[62522]: DEBUG oslo_vmware.api [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1437.470198] env[62522]: value = "task-2416599" [ 1437.470198] env[62522]: _type = "Task" [ 1437.470198] env[62522]: } to 
complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.477608] env[62522]: DEBUG oslo_vmware.api [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416599, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.980503] env[62522]: DEBUG oslo_vmware.api [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416599, 'name': PowerOffVM_Task, 'duration_secs': 0.16637} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.980869] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1437.980915] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Updating instance '27f4b976-7dff-49b0-9b00-7515cb976e72' progress to 17 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1438.487822] env[62522]: DEBUG nova.virt.hardware [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:04Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1438.488115] env[62522]: DEBUG nova.virt.hardware [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1438.488239] env[62522]: DEBUG nova.virt.hardware [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1438.488419] env[62522]: DEBUG nova.virt.hardware [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1438.488568] env[62522]: DEBUG nova.virt.hardware [None 
req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1438.488717] env[62522]: DEBUG nova.virt.hardware [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1438.488925] env[62522]: DEBUG nova.virt.hardware [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1438.489099] env[62522]: DEBUG nova.virt.hardware [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1438.489270] env[62522]: DEBUG nova.virt.hardware [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1438.489434] env[62522]: DEBUG nova.virt.hardware [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1438.489609] env[62522]: DEBUG nova.virt.hardware [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1438.494734] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dacc893b-bc19-4fa8-b637-8834b2a62e17 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.510278] env[62522]: DEBUG oslo_vmware.api [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1438.510278] env[62522]: value = "task-2416600" [ 1438.510278] env[62522]: _type = "Task" [ 1438.510278] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.518673] env[62522]: DEBUG oslo_vmware.api [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416600, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.019920] env[62522]: DEBUG oslo_vmware.api [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416600, 'name': ReconfigVM_Task, 'duration_secs': 0.187299} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.020845] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Updating instance '27f4b976-7dff-49b0-9b00-7515cb976e72' progress to 33 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1439.527656] env[62522]: DEBUG nova.virt.hardware [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1439.527993] env[62522]: DEBUG nova.virt.hardware [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1439.528213] env[62522]: DEBUG nova.virt.hardware [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1439.528497] env[62522]: DEBUG nova.virt.hardware [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1439.528727] env[62522]: DEBUG nova.virt.hardware [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1439.528966] env[62522]: DEBUG nova.virt.hardware [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1439.529278] env[62522]: DEBUG nova.virt.hardware [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d 
tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1439.529520] env[62522]: DEBUG nova.virt.hardware [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1439.529797] env[62522]: DEBUG nova.virt.hardware [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1439.530076] env[62522]: DEBUG nova.virt.hardware [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1439.530344] env[62522]: DEBUG nova.virt.hardware [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1439.535786] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Reconfiguring VM instance instance-0000006b to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1439.536150] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4953bcf9-6216-4ab8-b399-118b8bd00c11 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.554883] env[62522]: DEBUG oslo_vmware.api [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1439.554883] env[62522]: value = "task-2416601" [ 1439.554883] env[62522]: _type = "Task" [ 1439.554883] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.562727] env[62522]: DEBUG oslo_vmware.api [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416601, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.064084] env[62522]: DEBUG oslo_vmware.api [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416601, 'name': ReconfigVM_Task, 'duration_secs': 0.17035} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.064364] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Reconfigured VM instance instance-0000006b to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1440.066027] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-260a1e83-2014-4bcb-b4b8-e65fe5123df5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.086717] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 27f4b976-7dff-49b0-9b00-7515cb976e72/27f4b976-7dff-49b0-9b00-7515cb976e72.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1440.086960] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c56c17b-9ee8-42d0-bd2b-d8fbbdedd89a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.104657] env[62522]: DEBUG oslo_vmware.api [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1440.104657] env[62522]: value = "task-2416602" [ 1440.104657] env[62522]: _type = "Task" [ 1440.104657] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.111976] env[62522]: DEBUG oslo_vmware.api [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416602, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.470562] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "75ba1afc-3586-4bb0-ae7f-ebf5a794f068" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1440.470862] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "75ba1afc-3586-4bb0-ae7f-ebf5a794f068" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1440.471031] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "75ba1afc-3586-4bb0-ae7f-ebf5a794f068-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1440.471225] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "75ba1afc-3586-4bb0-ae7f-ebf5a794f068-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1440.471401] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "75ba1afc-3586-4bb0-ae7f-ebf5a794f068-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1440.473551] env[62522]: INFO nova.compute.manager [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Terminating instance [ 1440.614232] env[62522]: DEBUG oslo_vmware.api [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416602, 'name': ReconfigVM_Task, 'duration_secs': 0.249289} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.614484] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 27f4b976-7dff-49b0-9b00-7515cb976e72/27f4b976-7dff-49b0-9b00-7515cb976e72.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1440.614753] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Updating instance '27f4b976-7dff-49b0-9b00-7515cb976e72' progress to 50 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1440.977927] env[62522]: DEBUG nova.compute.manager [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1440.978221] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1440.978504] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17016c76-b4c5-46d9-8fce-73723e09d3d1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.985383] env[62522]: DEBUG oslo_vmware.api [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1440.985383] env[62522]: value = "task-2416603" [ 1440.985383] env[62522]: _type = "Task" [ 1440.985383] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.992772] env[62522]: DEBUG oslo_vmware.api [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416603, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.121262] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39cb719d-776c-4ded-8c86-7fd9b23ef24d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.143054] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9e28809-355c-41a2-ba7b-7303a06a5e74 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.160412] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Updating instance '27f4b976-7dff-49b0-9b00-7515cb976e72' progress to 67 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1441.495274] env[62522]: DEBUG oslo_vmware.api [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416603, 'name': PowerOffVM_Task, 'duration_secs': 0.157433} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.495576] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1441.495851] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Volume detach. 
Driver type: vmdk {{(pid=62522) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1441.496081] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489882', 'volume_id': 'cb4b8a20-0671-45d5-bf05-a9de0e808c88', 'name': 'volume-cb4b8a20-0671-45d5-bf05-a9de0e808c88', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '75ba1afc-3586-4bb0-ae7f-ebf5a794f068', 'attached_at': '2025-02-10T12:31:54.000000', 'detached_at': '', 'volume_id': 'cb4b8a20-0671-45d5-bf05-a9de0e808c88', 'serial': 'cb4b8a20-0671-45d5-bf05-a9de0e808c88'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1441.496847] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c7bdf81-4721-49be-93eb-a99467a0f59a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.513877] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43db7b53-731e-42b6-9cca-f9a720e82037 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.519948] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5df3bb61-18d9-4a58-adf5-697ddc8ddd58 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.536355] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fc8569f-d2cf-4321-bebf-8dbcd2f4f2a6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.550273] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] The volume has not been displaced from its original location: [datastore1] volume-cb4b8a20-0671-45d5-bf05-a9de0e808c88/volume-cb4b8a20-0671-45d5-bf05-a9de0e808c88.vmdk. No consolidation needed. 
{{(pid=62522) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1441.555447] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Reconfiguring VM instance instance-00000075 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1441.555745] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cfbfba33-d98c-4aee-a930-ee72959c29ea {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.572518] env[62522]: DEBUG oslo_vmware.api [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1441.572518] env[62522]: value = "task-2416604" [ 1441.572518] env[62522]: _type = "Task" [ 1441.572518] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.579918] env[62522]: DEBUG oslo_vmware.api [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416604, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.711918] env[62522]: DEBUG nova.network.neutron [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Port cf4b3978-2fa2-4182-9422-abf29faafcf6 binding to destination host cpu-1 is already ACTIVE {{(pid=62522) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1442.083057] env[62522]: DEBUG oslo_vmware.api [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416604, 'name': ReconfigVM_Task, 'duration_secs': 0.143665} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.083057] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Reconfigured VM instance instance-00000075 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1442.087281] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e07dc023-1456-4b0f-8bc6-91b1dfe99fab {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.101482] env[62522]: DEBUG oslo_vmware.api [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1442.101482] env[62522]: value = "task-2416605" [ 1442.101482] env[62522]: _type = "Task" [ 1442.101482] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.108838] env[62522]: DEBUG oslo_vmware.api [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416605, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.611164] env[62522]: DEBUG oslo_vmware.api [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416605, 'name': ReconfigVM_Task, 'duration_secs': 0.134535} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.611509] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489882', 'volume_id': 'cb4b8a20-0671-45d5-bf05-a9de0e808c88', 'name': 'volume-cb4b8a20-0671-45d5-bf05-a9de0e808c88', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '75ba1afc-3586-4bb0-ae7f-ebf5a794f068', 'attached_at': '2025-02-10T12:31:54.000000', 'detached_at': '', 'volume_id': 'cb4b8a20-0671-45d5-bf05-a9de0e808c88', 'serial': 'cb4b8a20-0671-45d5-bf05-a9de0e808c88'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1442.611738] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1442.612555] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc30127d-a85e-436f-a35a-24ee661adfc5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.618893] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1442.619133] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f2a27ea5-bf34-4a71-9846-db4d433a20c7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.683101] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1442.683361] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 
tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1442.683473] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Deleting the datastore file [datastore1] 75ba1afc-3586-4bb0-ae7f-ebf5a794f068 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1442.683740] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-83722e56-7837-4e72-adf7-7824ee2597dc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.690608] env[62522]: DEBUG oslo_vmware.api [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1442.690608] env[62522]: value = "task-2416607" [ 1442.690608] env[62522]: _type = "Task" [ 1442.690608] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.697976] env[62522]: DEBUG oslo_vmware.api [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416607, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.733045] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "27f4b976-7dff-49b0-9b00-7515cb976e72-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1442.733309] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "27f4b976-7dff-49b0-9b00-7515cb976e72-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1442.733443] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "27f4b976-7dff-49b0-9b00-7515cb976e72-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.200405] env[62522]: DEBUG oslo_vmware.api [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416607, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078568} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.200662] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1443.200848] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1443.201055] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1443.201262] env[62522]: INFO nova.compute.manager [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Took 2.22 seconds to destroy the instance on the hypervisor. [ 1443.201508] env[62522]: DEBUG oslo.service.loopingcall [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1443.201696] env[62522]: DEBUG nova.compute.manager [-] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1443.201793] env[62522]: DEBUG nova.network.neutron [-] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1443.665709] env[62522]: DEBUG nova.compute.manager [req-ea1787b0-2bce-41d5-bfec-58e9c0f7abd5 req-4621aca4-bfc6-4dd9-b982-19f6845ee5cc service nova] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Received event network-vif-deleted-5a9957c5-133c-4c1b-8469-dbd152ccd386 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1443.665964] env[62522]: INFO nova.compute.manager [req-ea1787b0-2bce-41d5-bfec-58e9c0f7abd5 req-4621aca4-bfc6-4dd9-b982-19f6845ee5cc service nova] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Neutron deleted interface 5a9957c5-133c-4c1b-8469-dbd152ccd386; detaching it from the instance and deleting it from the info cache [ 1443.666143] env[62522]: DEBUG nova.network.neutron [req-ea1787b0-2bce-41d5-bfec-58e9c0f7abd5 req-4621aca4-bfc6-4dd9-b982-19f6845ee5cc service nova] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1443.765695] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1443.765931] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1443.766137] env[62522]: DEBUG nova.network.neutron [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1444.145073] env[62522]: DEBUG nova.network.neutron [-] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1444.168615] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7c408996-a88f-48e5-90a6-ea2c017912c4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.180499] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be3a027-aad4-4e42-a47c-8641cdc503ac {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.207028] env[62522]: DEBUG nova.compute.manager 
[req-ea1787b0-2bce-41d5-bfec-58e9c0f7abd5 req-4621aca4-bfc6-4dd9-b982-19f6845ee5cc service nova] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Detach interface failed, port_id=5a9957c5-133c-4c1b-8469-dbd152ccd386, reason: Instance 75ba1afc-3586-4bb0-ae7f-ebf5a794f068 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1444.494807] env[62522]: DEBUG nova.network.neutron [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Updating instance_info_cache with network_info: [{"id": "cf4b3978-2fa2-4182-9422-abf29faafcf6", "address": "fa:16:3e:74:26:e7", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf4b3978-2f", "ovs_interfaceid": "cf4b3978-2fa2-4182-9422-abf29faafcf6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1444.647789] env[62522]: INFO nova.compute.manager [-] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Took 1.45 seconds to deallocate network for instance. [ 1444.999070] env[62522]: DEBUG oslo_concurrency.lockutils [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Releasing lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1445.188774] env[62522]: INFO nova.compute.manager [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Took 0.54 seconds to detach 1 volumes for instance. 
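The _detach_volume_vmdk entries above print the Cinder connection_info that drives this detach: data['volume'] is the volume's shadow-VM reference ('vm-489882'), data['volume_id'] and data['name'] identify the Cinder volume, and volume-cb4b8a20-.../volume-cb4b8a20-....vmdk is the backing file checked for displacement before disk 2000 is detached via ReconfigVM_Task. As a rough illustration of reading that structure (not Nova's actual code path; the datastore name below is copied from the log and would normally come from the backing lookup), the relevant fields could be pulled out like this:

```python
# Connection info as logged by _detach_volume_vmdk above (abbreviated).
connection_info = {
    'driver_volume_type': 'vmdk',
    'data': {
        'volume': 'vm-489882',
        'volume_id': 'cb4b8a20-0671-45d5-bf05-a9de0e808c88',
        'name': 'volume-cb4b8a20-0671-45d5-bf05-a9de0e808c88',
        'access_mode': 'rw',
        'encrypted': False,
    },
}


def describe_vmdk_detach(connection_info, datastore='datastore1'):
    """Summarize the fields a vmdk detach needs; illustrative only."""
    if connection_info['driver_volume_type'] != 'vmdk':
        raise ValueError('not a vmdk-backed volume')
    data = connection_info['data']
    # Expected backing path, as seen in the "has not been displaced" log entry.
    backing = f"[{datastore}] {data['name']}/{data['name']}.vmdk"
    return {
        'shadow_vm_ref': data['volume'],        # managed object ref of the shadow VM
        'cinder_volume_id': data['volume_id'],
        'expected_backing': backing,
        'writable': data['access_mode'] == 'rw',
    }


print(describe_vmdk_detach(connection_info))
```

In the trace this corresponds to the sequence: power off, detach disk 2000 from the instance VM, confirm the volume file was not displaced, then report "Detached VMDK" with the same connection_info before the instance itself is unregistered and its datastore directory deleted.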
[ 1445.190842] env[62522]: DEBUG nova.compute.manager [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Deleting volume: cb4b8a20-0671-45d5-bf05-a9de0e808c88 {{(pid=62522) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1445.528921] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a797d871-692e-4018-bf97-ebf555133b61 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.547709] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-419f44dc-8754-4de4-b613-753868fa2293 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.554639] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Updating instance '27f4b976-7dff-49b0-9b00-7515cb976e72' progress to 83 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1445.641024] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ade05cbc-3228-4352-906c-7665b7cb1fa9 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "d4f56d59-e03f-4eaf-aa2d-b77241e13be3" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1445.641024] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ade05cbc-3228-4352-906c-7665b7cb1fa9 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "d4f56d59-e03f-4eaf-aa2d-b77241e13be3" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1445.722141] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1445.722378] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1445.722589] env[62522]: DEBUG nova.objects.instance [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lazy-loading 'resources' on Instance uuid 75ba1afc-3586-4bb0-ae7f-ebf5a794f068 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1446.061312] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1446.061653] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-47c3bfd3-7805-4c15-bd8d-78042ceb977b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.071251] env[62522]: DEBUG oslo_vmware.api [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1446.071251] env[62522]: value = "task-2416609" [ 1446.071251] env[62522]: _type = "Task" [ 1446.071251] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.078467] env[62522]: DEBUG oslo_vmware.api [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416609, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.144181] env[62522]: DEBUG nova.compute.utils [None req-ade05cbc-3228-4352-906c-7665b7cb1fa9 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1446.302907] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e44cace0-3f5c-4eb7-adb8-b6ad1ef3ea31 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.310337] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-458a4499-d493-4baf-bcde-7bedf858ff15 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.340720] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66946a9d-2694-4000-b01f-8becfdc15512 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.348268] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d79671b-98b8-4e4f-aeca-438edaeffef9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.362028] env[62522]: DEBUG nova.compute.provider_tree [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1446.580719] env[62522]: DEBUG oslo_vmware.api [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416609, 'name': PowerOnVM_Task, 'duration_secs': 0.375974} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.580993] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1446.581203] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-9b6ac037-08ad-4b08-b2e0-1b09578e5e5d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Updating instance '27f4b976-7dff-49b0-9b00-7515cb976e72' progress to 100 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1446.647966] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ade05cbc-3228-4352-906c-7665b7cb1fa9 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "d4f56d59-e03f-4eaf-aa2d-b77241e13be3" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1446.865736] env[62522]: DEBUG nova.scheduler.client.report [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1447.370153] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.648s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1447.389057] env[62522]: INFO nova.scheduler.client.report [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Deleted allocations for instance 75ba1afc-3586-4bb0-ae7f-ebf5a794f068 [ 1447.705127] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ade05cbc-3228-4352-906c-7665b7cb1fa9 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "d4f56d59-e03f-4eaf-aa2d-b77241e13be3" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1447.705468] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ade05cbc-3228-4352-906c-7665b7cb1fa9 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "d4f56d59-e03f-4eaf-aa2d-b77241e13be3" acquired by 
"nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1447.705744] env[62522]: INFO nova.compute.manager [None req-ade05cbc-3228-4352-906c-7665b7cb1fa9 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Attaching volume 43ae7e1b-6ba6-42c5-bd69-8d4797f4404d to /dev/sdb [ 1447.740463] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65acea9e-f09a-4dc1-a150-aebec8c0f95b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.747351] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dbfd9ee-beb1-4314-b62d-e88dc9987b98 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.760386] env[62522]: DEBUG nova.virt.block_device [None req-ade05cbc-3228-4352-906c-7665b7cb1fa9 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Updating existing volume attachment record: 15fa5da1-35d7-40ce-81bc-fb4476677414 {{(pid=62522) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1447.898756] env[62522]: DEBUG oslo_concurrency.lockutils [None req-2f746e4a-1895-4944-88f6-394f3697e656 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "75ba1afc-3586-4bb0-ae7f-ebf5a794f068" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.425s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.223115] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "c95f697b-0d68-489d-bfc4-9d129eab1be2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1448.223452] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "c95f697b-0d68-489d-bfc4-9d129eab1be2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.223704] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "c95f697b-0d68-489d-bfc4-9d129eab1be2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1448.223944] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock 
"c95f697b-0d68-489d-bfc4-9d129eab1be2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.224209] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "c95f697b-0d68-489d-bfc4-9d129eab1be2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.226897] env[62522]: INFO nova.compute.manager [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Terminating instance [ 1448.730534] env[62522]: DEBUG nova.compute.manager [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1448.730815] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1448.731790] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94db066b-664f-4573-8e33-b90ee2dbd05d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.739718] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1448.739950] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-22bd0e06-b6b3-4cc2-a8f2-960734a8b890 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.747248] env[62522]: DEBUG oslo_vmware.api [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1448.747248] env[62522]: value = "task-2416611" [ 1448.747248] env[62522]: _type = "Task" [ 1448.747248] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.755345] env[62522]: DEBUG oslo_vmware.api [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416611, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.257270] env[62522]: DEBUG oslo_vmware.api [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416611, 'name': PowerOffVM_Task, 'duration_secs': 0.203202} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.257544] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1449.257717] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1449.257965] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d7ff9320-fa46-42d6-85e1-08bffb135824 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.313402] env[62522]: DEBUG oslo_concurrency.lockutils [None req-91e5eeef-a6ee-4d45-b9d3-d2e37733c26c tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "27f4b976-7dff-49b0-9b00-7515cb976e72" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1449.313695] env[62522]: DEBUG oslo_concurrency.lockutils [None req-91e5eeef-a6ee-4d45-b9d3-d2e37733c26c tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "27f4b976-7dff-49b0-9b00-7515cb976e72" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1449.314023] env[62522]: DEBUG nova.compute.manager [None req-91e5eeef-a6ee-4d45-b9d3-d2e37733c26c tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Going to confirm migration 9 {{(pid=62522) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1449.321031] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1449.321315] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1449.321573] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Deleting the datastore file [datastore1] c95f697b-0d68-489d-bfc4-9d129eab1be2 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1449.321937] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e672d04d-7e5f-4d50-86b5-1b105251e4c7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.329749] env[62522]: DEBUG oslo_vmware.api [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1449.329749] env[62522]: value = "task-2416613" [ 1449.329749] env[62522]: _type = "Task" [ 1449.329749] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.338063] env[62522]: DEBUG oslo_vmware.api [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416613, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.840027] env[62522]: DEBUG oslo_vmware.api [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416613, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132884} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.840465] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1449.840522] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1449.840682] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1449.840859] env[62522]: INFO nova.compute.manager [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1449.841134] env[62522]: DEBUG oslo.service.loopingcall [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1449.841340] env[62522]: DEBUG nova.compute.manager [-] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1449.841820] env[62522]: DEBUG nova.network.neutron [-] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1449.864389] env[62522]: DEBUG oslo_concurrency.lockutils [None req-91e5eeef-a6ee-4d45-b9d3-d2e37733c26c tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1449.864562] env[62522]: DEBUG oslo_concurrency.lockutils [None req-91e5eeef-a6ee-4d45-b9d3-d2e37733c26c tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1449.864735] env[62522]: DEBUG nova.network.neutron [None req-91e5eeef-a6ee-4d45-b9d3-d2e37733c26c tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1449.864915] env[62522]: DEBUG nova.objects.instance [None req-91e5eeef-a6ee-4d45-b9d3-d2e37733c26c tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lazy-loading 'info_cache' on Instance uuid 27f4b976-7dff-49b0-9b00-7515cb976e72 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1450.136865] env[62522]: DEBUG nova.compute.manager [req-5db1856d-8d3d-4dde-aaf8-186df33f453b req-6cadd9ba-fc1c-49d1-96d5-295ec487adab service nova] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Received event network-vif-deleted-74c8cccc-0aa8-4147-9172-cbb2cbfcb35f {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1450.137079] env[62522]: INFO nova.compute.manager [req-5db1856d-8d3d-4dde-aaf8-186df33f453b req-6cadd9ba-fc1c-49d1-96d5-295ec487adab service nova] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Neutron deleted interface 74c8cccc-0aa8-4147-9172-cbb2cbfcb35f; detaching it from the instance and deleting it from the info cache [ 1450.137274] env[62522]: DEBUG nova.network.neutron [req-5db1856d-8d3d-4dde-aaf8-186df33f453b req-6cadd9ba-fc1c-49d1-96d5-295ec487adab service nova] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1450.615638] env[62522]: DEBUG nova.network.neutron [-] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1450.640146] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-77685b96-8434-475b-86d2-981b1b50cec3 {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.649605] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e94c57-62b9-4f7a-9198-dd1730d49da4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.676854] env[62522]: DEBUG nova.compute.manager [req-5db1856d-8d3d-4dde-aaf8-186df33f453b req-6cadd9ba-fc1c-49d1-96d5-295ec487adab service nova] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Detach interface failed, port_id=74c8cccc-0aa8-4147-9172-cbb2cbfcb35f, reason: Instance c95f697b-0d68-489d-bfc4-9d129eab1be2 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1451.097640] env[62522]: DEBUG nova.network.neutron [None req-91e5eeef-a6ee-4d45-b9d3-d2e37733c26c tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Updating instance_info_cache with network_info: [{"id": "cf4b3978-2fa2-4182-9422-abf29faafcf6", "address": "fa:16:3e:74:26:e7", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf4b3978-2f", "ovs_interfaceid": "cf4b3978-2fa2-4182-9422-abf29faafcf6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1451.118450] env[62522]: INFO nova.compute.manager [-] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Took 1.28 seconds to deallocate network for instance. 
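Several of the surrounding operations (PowerOnVM_Task, ReconfigVM_Task, DeleteDatastoreFile_Task, SearchDatastore_Task) follow the same shape: the driver invokes the vSphere method, then waits for the returned task, logging "progress is N%" on each poll until it completes, while the "Waiting for function ... to return" lines point at oslo.service's loopingcall module. A minimal sketch of that fixed-interval polling pattern, assuming only that oslo.service is installed; get_task_state is a hypothetical stand-in for the PropertyCollector reads (RetrievePropertiesEx) the real code performs, and the interval and task id are examples taken from the log, not the driver's actual values:

    from oslo_service import loopingcall

    def get_task_state(task_ref):
        # Hypothetical helper: the real driver reads the task state via the
        # vSphere PropertyCollector, as the RetrievePropertiesEx calls above show.
        return 'success'

    def _poll(task_ref):
        state = get_task_state(task_ref)
        if state == 'success':
            # Stops the loop; the value is returned from wait() below.
            raise loopingcall.LoopingCallDone(state)
        if state == 'error':
            raise RuntimeError('task %s failed' % task_ref)
        # queued/running: return and poll again on the next interval

    timer = loopingcall.FixedIntervalLoopingCall(_poll, 'task-2416609')
    result = timer.start(interval=0.5).wait()

Each "Task: {'id': ..., 'name': ...} progress is N%" line in the log corresponds to one such poll iteration, and the "completed successfully" line with duration_secs marks the iteration that ended the loop.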
[ 1451.600659] env[62522]: DEBUG oslo_concurrency.lockutils [None req-91e5eeef-a6ee-4d45-b9d3-d2e37733c26c tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Releasing lock "refresh_cache-27f4b976-7dff-49b0-9b00-7515cb976e72" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1451.600939] env[62522]: DEBUG nova.objects.instance [None req-91e5eeef-a6ee-4d45-b9d3-d2e37733c26c tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lazy-loading 'migration_context' on Instance uuid 27f4b976-7dff-49b0-9b00-7515cb976e72 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1451.624812] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1451.624812] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1451.625124] env[62522]: DEBUG nova.objects.instance [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lazy-loading 'resources' on Instance uuid c95f697b-0d68-489d-bfc4-9d129eab1be2 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1452.104086] env[62522]: DEBUG nova.objects.base [None req-91e5eeef-a6ee-4d45-b9d3-d2e37733c26c tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Object Instance<27f4b976-7dff-49b0-9b00-7515cb976e72> lazy-loaded attributes: info_cache,migration_context {{(pid=62522) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1452.105062] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce8bcbb7-0c1a-405c-80f1-efaa290b8b34 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.125631] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3be0a0bf-f508-41f8-b375-c4e92312319e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.134011] env[62522]: DEBUG oslo_vmware.api [None req-91e5eeef-a6ee-4d45-b9d3-d2e37733c26c tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1452.134011] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52880b30-0610-905c-64f9-f5bc290a74fb" [ 1452.134011] env[62522]: _type = "Task" [ 1452.134011] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.141974] env[62522]: DEBUG oslo_vmware.api [None req-91e5eeef-a6ee-4d45-b9d3-d2e37733c26c tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52880b30-0610-905c-64f9-f5bc290a74fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.207448] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76495cd5-e21d-45a0-8e0b-f4c08b7a3105 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.214550] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7deaea1-4adc-405a-831c-c26f8e88e154 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.245866] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d648fd3-ec02-4fbb-8bf0-b4de3e11a59c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.252042] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0609205-87b6-4b0c-ab30-09043035e417 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.265927] env[62522]: DEBUG nova.compute.provider_tree [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1452.303119] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ade05cbc-3228-4352-906c-7665b7cb1fa9 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Volume attach. 
Driver type: vmdk {{(pid=62522) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1452.303401] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ade05cbc-3228-4352-906c-7665b7cb1fa9 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489890', 'volume_id': '43ae7e1b-6ba6-42c5-bd69-8d4797f4404d', 'name': 'volume-43ae7e1b-6ba6-42c5-bd69-8d4797f4404d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd4f56d59-e03f-4eaf-aa2d-b77241e13be3', 'attached_at': '', 'detached_at': '', 'volume_id': '43ae7e1b-6ba6-42c5-bd69-8d4797f4404d', 'serial': '43ae7e1b-6ba6-42c5-bd69-8d4797f4404d'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1452.304267] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cbd4fd8-c792-4275-8e41-0a8de86f815b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.319813] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc06cd3e-26ca-4b1e-9521-1e4a4a4361db {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.343817] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ade05cbc-3228-4352-906c-7665b7cb1fa9 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Reconfiguring VM instance instance-00000077 to attach disk [datastore2] volume-43ae7e1b-6ba6-42c5-bd69-8d4797f4404d/volume-43ae7e1b-6ba6-42c5-bd69-8d4797f4404d.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1452.344356] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-668195a2-3d15-45d7-8f57-3a7813ca2ece {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.361724] env[62522]: DEBUG oslo_vmware.api [None req-ade05cbc-3228-4352-906c-7665b7cb1fa9 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1452.361724] env[62522]: value = "task-2416615" [ 1452.361724] env[62522]: _type = "Task" [ 1452.361724] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.369711] env[62522]: DEBUG oslo_vmware.api [None req-ade05cbc-3228-4352-906c-7665b7cb1fa9 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416615, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.646073] env[62522]: DEBUG oslo_vmware.api [None req-91e5eeef-a6ee-4d45-b9d3-d2e37733c26c tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52880b30-0610-905c-64f9-f5bc290a74fb, 'name': SearchDatastore_Task, 'duration_secs': 0.007025} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.646421] env[62522]: DEBUG oslo_concurrency.lockutils [None req-91e5eeef-a6ee-4d45-b9d3-d2e37733c26c tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.768930] env[62522]: DEBUG nova.scheduler.client.report [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1452.871360] env[62522]: DEBUG oslo_vmware.api [None req-ade05cbc-3228-4352-906c-7665b7cb1fa9 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416615, 'name': ReconfigVM_Task, 'duration_secs': 0.322906} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.871659] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ade05cbc-3228-4352-906c-7665b7cb1fa9 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Reconfigured VM instance instance-00000077 to attach disk [datastore2] volume-43ae7e1b-6ba6-42c5-bd69-8d4797f4404d/volume-43ae7e1b-6ba6-42c5-bd69-8d4797f4404d.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1452.876308] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b4566b5e-cd07-478a-b33e-a4c0032e7bce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.891447] env[62522]: DEBUG oslo_vmware.api [None req-ade05cbc-3228-4352-906c-7665b7cb1fa9 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1452.891447] env[62522]: value = "task-2416616" [ 1452.891447] env[62522]: _type = "Task" [ 1452.891447] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.898995] env[62522]: DEBUG oslo_vmware.api [None req-ade05cbc-3228-4352-906c-7665b7cb1fa9 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416616, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.273954] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.649s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1453.276267] env[62522]: DEBUG oslo_concurrency.lockutils [None req-91e5eeef-a6ee-4d45-b9d3-d2e37733c26c tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.630s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1453.295575] env[62522]: INFO nova.scheduler.client.report [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Deleted allocations for instance c95f697b-0d68-489d-bfc4-9d129eab1be2 [ 1453.401429] env[62522]: DEBUG oslo_vmware.api [None req-ade05cbc-3228-4352-906c-7665b7cb1fa9 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416616, 'name': ReconfigVM_Task, 'duration_secs': 0.141977} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.401732] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ade05cbc-3228-4352-906c-7665b7cb1fa9 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489890', 'volume_id': '43ae7e1b-6ba6-42c5-bd69-8d4797f4404d', 'name': 'volume-43ae7e1b-6ba6-42c5-bd69-8d4797f4404d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd4f56d59-e03f-4eaf-aa2d-b77241e13be3', 'attached_at': '', 'detached_at': '', 'volume_id': '43ae7e1b-6ba6-42c5-bd69-8d4797f4404d', 'serial': '43ae7e1b-6ba6-42c5-bd69-8d4797f4404d'} {{(pid=62522) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1453.802205] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f667e609-b9a6-4162-9209-98b4d7950852 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "c95f697b-0d68-489d-bfc4-9d129eab1be2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.579s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1453.840509] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88275d66-893c-4f5c-b5bd-c13a94ea51c8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.848296] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d191e4-f47e-4e62-a714-164a83038baa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.877279] 
env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df39e2f1-9d85-4232-bc87-e1a94e447a81 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.883887] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1654671f-2a71-4e07-91cd-e58227bd6fb8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.896666] env[62522]: DEBUG nova.compute.provider_tree [None req-91e5eeef-a6ee-4d45-b9d3-d2e37733c26c tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1454.399773] env[62522]: DEBUG nova.scheduler.client.report [None req-91e5eeef-a6ee-4d45-b9d3-d2e37733c26c tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1454.436456] env[62522]: DEBUG nova.objects.instance [None req-ade05cbc-3228-4352-906c-7665b7cb1fa9 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lazy-loading 'flavor' on Instance uuid d4f56d59-e03f-4eaf-aa2d-b77241e13be3 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1454.941359] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ade05cbc-3228-4352-906c-7665b7cb1fa9 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "d4f56d59-e03f-4eaf-aa2d-b77241e13be3" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.236s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1455.130698] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5ae786c8-4a00-4938-8f9f-2828a37aaa75 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "d4f56d59-e03f-4eaf-aa2d-b77241e13be3" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1455.130959] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5ae786c8-4a00-4938-8f9f-2828a37aaa75 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "d4f56d59-e03f-4eaf-aa2d-b77241e13be3" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1455.409995] env[62522]: DEBUG oslo_concurrency.lockutils [None req-91e5eeef-a6ee-4d45-b9d3-d2e37733c26c tempest-ServerActionsTestJSON-1767099261 
tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.134s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1455.634268] env[62522]: INFO nova.compute.manager [None req-5ae786c8-4a00-4938-8f9f-2828a37aaa75 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Detaching volume 43ae7e1b-6ba6-42c5-bd69-8d4797f4404d [ 1455.667054] env[62522]: INFO nova.virt.block_device [None req-5ae786c8-4a00-4938-8f9f-2828a37aaa75 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Attempting to driver detach volume 43ae7e1b-6ba6-42c5-bd69-8d4797f4404d from mountpoint /dev/sdb [ 1455.667054] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ae786c8-4a00-4938-8f9f-2828a37aaa75 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Volume detach. Driver type: vmdk {{(pid=62522) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1455.667054] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ae786c8-4a00-4938-8f9f-2828a37aaa75 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489890', 'volume_id': '43ae7e1b-6ba6-42c5-bd69-8d4797f4404d', 'name': 'volume-43ae7e1b-6ba6-42c5-bd69-8d4797f4404d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd4f56d59-e03f-4eaf-aa2d-b77241e13be3', 'attached_at': '', 'detached_at': '', 'volume_id': '43ae7e1b-6ba6-42c5-bd69-8d4797f4404d', 'serial': '43ae7e1b-6ba6-42c5-bd69-8d4797f4404d'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1455.667054] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b9f914-3f6f-4276-8665-5867a52f707d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.688568] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19661814-6d30-4490-b56c-7bb7a43f8ddd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.695310] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8171c3b3-3e0a-4485-9dee-5faeae3c6dd9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.715163] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e96050-0e50-401c-89ef-2eb5cf8875bf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.729181] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ae786c8-4a00-4938-8f9f-2828a37aaa75 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] The volume has not been 
displaced from its original location: [datastore2] volume-43ae7e1b-6ba6-42c5-bd69-8d4797f4404d/volume-43ae7e1b-6ba6-42c5-bd69-8d4797f4404d.vmdk. No consolidation needed. {{(pid=62522) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1455.734289] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ae786c8-4a00-4938-8f9f-2828a37aaa75 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Reconfiguring VM instance instance-00000077 to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1455.734557] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0c0d18c-77af-418e-9584-5a5ebd5888f1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.752090] env[62522]: DEBUG oslo_vmware.api [None req-5ae786c8-4a00-4938-8f9f-2828a37aaa75 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1455.752090] env[62522]: value = "task-2416618" [ 1455.752090] env[62522]: _type = "Task" [ 1455.752090] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.759456] env[62522]: DEBUG oslo_vmware.api [None req-5ae786c8-4a00-4938-8f9f-2828a37aaa75 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416618, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.964058] env[62522]: INFO nova.scheduler.client.report [None req-91e5eeef-a6ee-4d45-b9d3-d2e37733c26c tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Deleted allocation for migration cf085b26-49a3-4351-ae36-7464fdb8967b [ 1456.261766] env[62522]: DEBUG oslo_vmware.api [None req-5ae786c8-4a00-4938-8f9f-2828a37aaa75 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416618, 'name': ReconfigVM_Task, 'duration_secs': 0.219412} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.262064] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ae786c8-4a00-4938-8f9f-2828a37aaa75 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Reconfigured VM instance instance-00000077 to detach disk 2001 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1456.266629] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-486c38c3-4574-4685-929a-4c18a542ed07 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.281286] env[62522]: DEBUG oslo_vmware.api [None req-5ae786c8-4a00-4938-8f9f-2828a37aaa75 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1456.281286] env[62522]: value = "task-2416619" [ 1456.281286] env[62522]: _type = "Task" [ 1456.281286] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.288852] env[62522]: DEBUG oslo_vmware.api [None req-5ae786c8-4a00-4938-8f9f-2828a37aaa75 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416619, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.361594] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "27f4b976-7dff-49b0-9b00-7515cb976e72" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1456.469593] env[62522]: DEBUG oslo_concurrency.lockutils [None req-91e5eeef-a6ee-4d45-b9d3-d2e37733c26c tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "27f4b976-7dff-49b0-9b00-7515cb976e72" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.156s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1456.470737] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "27f4b976-7dff-49b0-9b00-7515cb976e72" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.109s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1456.470975] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "27f4b976-7dff-49b0-9b00-7515cb976e72-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1456.471265] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "27f4b976-7dff-49b0-9b00-7515cb976e72-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1456.471449] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "27f4b976-7dff-49b0-9b00-7515cb976e72-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1456.473189] env[62522]: INFO nova.compute.manager [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Terminating instance [ 1456.790548] env[62522]: DEBUG oslo_vmware.api 
[None req-5ae786c8-4a00-4938-8f9f-2828a37aaa75 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416619, 'name': ReconfigVM_Task, 'duration_secs': 0.142372} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.790857] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ae786c8-4a00-4938-8f9f-2828a37aaa75 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-489890', 'volume_id': '43ae7e1b-6ba6-42c5-bd69-8d4797f4404d', 'name': 'volume-43ae7e1b-6ba6-42c5-bd69-8d4797f4404d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd4f56d59-e03f-4eaf-aa2d-b77241e13be3', 'attached_at': '', 'detached_at': '', 'volume_id': '43ae7e1b-6ba6-42c5-bd69-8d4797f4404d', 'serial': '43ae7e1b-6ba6-42c5-bd69-8d4797f4404d'} {{(pid=62522) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1456.976704] env[62522]: DEBUG nova.compute.manager [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1456.976941] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1456.977844] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4b5b949-455a-4a9a-8382-f30cc42ce086 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.985401] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1456.985626] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-80cc795b-9643-4957-ba9c-4f2a408a2a92 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.992119] env[62522]: DEBUG oslo_vmware.api [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1456.992119] env[62522]: value = "task-2416620" [ 1456.992119] env[62522]: _type = "Task" [ 1456.992119] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.000808] env[62522]: DEBUG oslo_vmware.api [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416620, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.334050] env[62522]: DEBUG nova.objects.instance [None req-5ae786c8-4a00-4938-8f9f-2828a37aaa75 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lazy-loading 'flavor' on Instance uuid d4f56d59-e03f-4eaf-aa2d-b77241e13be3 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1457.501402] env[62522]: DEBUG oslo_vmware.api [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416620, 'name': PowerOffVM_Task, 'duration_secs': 0.197207} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.501774] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1457.501847] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1457.502065] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ad5d110a-45d9-4ed5-a959-0af6205c7c91 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.565184] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1457.565424] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1457.565609] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Deleting the datastore file [datastore1] 27f4b976-7dff-49b0-9b00-7515cb976e72 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1457.566290] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-59e6dc9b-4a68-4e3f-b1aa-71e6bc3f004e {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.576871] env[62522]: DEBUG oslo_vmware.api [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1457.576871] env[62522]: value = "task-2416622" [ 1457.576871] env[62522]: _type = "Task" [ 1457.576871] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.584474] env[62522]: DEBUG oslo_vmware.api [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416622, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.935919] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "b31195c2-29f4-475c-baa7-fcb4791b7278" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1457.936248] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "b31195c2-29f4-475c-baa7-fcb4791b7278" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1457.936538] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "b31195c2-29f4-475c-baa7-fcb4791b7278-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1457.936836] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "b31195c2-29f4-475c-baa7-fcb4791b7278-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1457.936993] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "b31195c2-29f4-475c-baa7-fcb4791b7278-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1457.939171] env[62522]: INFO nova.compute.manager [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Terminating instance [ 1458.086349] env[62522]: DEBUG oslo_vmware.api [None 
req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416622, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136379} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.086610] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1458.086795] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1458.086984] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1458.087159] env[62522]: INFO nova.compute.manager [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1458.087404] env[62522]: DEBUG oslo.service.loopingcall [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1458.087590] env[62522]: DEBUG nova.compute.manager [-] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1458.087685] env[62522]: DEBUG nova.network.neutron [-] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1458.341177] env[62522]: DEBUG oslo_concurrency.lockutils [None req-5ae786c8-4a00-4938-8f9f-2828a37aaa75 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "d4f56d59-e03f-4eaf-aa2d-b77241e13be3" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.210s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.443119] env[62522]: DEBUG nova.compute.manager [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1458.443450] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1458.444406] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a42a3f3-2ffc-4b98-b770-bbe6c3c79ccf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.453522] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1458.453831] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a6ff2ba5-c74f-42c9-8268-f5b0e11726f5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.461684] env[62522]: DEBUG oslo_vmware.api [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1458.461684] env[62522]: value = "task-2416623" [ 1458.461684] env[62522]: _type = "Task" [ 1458.461684] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.470962] env[62522]: DEBUG oslo_vmware.api [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416623, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.598336] env[62522]: DEBUG nova.compute.manager [req-e60cc55f-1d5e-4c26-8bc2-e610ca162dfc req-3d94ff28-0389-4150-9241-18df84820da4 service nova] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Received event network-vif-deleted-cf4b3978-2fa2-4182-9422-abf29faafcf6 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1458.598709] env[62522]: INFO nova.compute.manager [req-e60cc55f-1d5e-4c26-8bc2-e610ca162dfc req-3d94ff28-0389-4150-9241-18df84820da4 service nova] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Neutron deleted interface cf4b3978-2fa2-4182-9422-abf29faafcf6; detaching it from the instance and deleting it from the info cache [ 1458.598836] env[62522]: DEBUG nova.network.neutron [req-e60cc55f-1d5e-4c26-8bc2-e610ca162dfc req-3d94ff28-0389-4150-9241-18df84820da4 service nova] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1458.971501] env[62522]: DEBUG oslo_vmware.api [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416623, 'name': PowerOffVM_Task, 'duration_secs': 0.205594} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.971788] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1458.971948] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1458.972229] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52d7d15a-c824-4704-89e7-ef13e1471a31 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.040274] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1459.040495] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1459.040676] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Deleting the datastore file [datastore2] b31195c2-29f4-475c-baa7-fcb4791b7278 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1459.041201] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c31405ac-f690-4b52-a46d-7364bdc65e78 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.047436] env[62522]: DEBUG oslo_vmware.api [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for the task: (returnval){ [ 1459.047436] env[62522]: value = "task-2416625" [ 1459.047436] env[62522]: _type = "Task" [ 1459.047436] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.054652] env[62522]: DEBUG oslo_vmware.api [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416625, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.077157] env[62522]: DEBUG nova.network.neutron [-] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1459.101574] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7c8c68f9-d354-4d81-be84-3a9710f878b1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.113063] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07744b8d-64ed-456a-993a-78546c321b2f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.140936] env[62522]: DEBUG nova.compute.manager [req-e60cc55f-1d5e-4c26-8bc2-e610ca162dfc req-3d94ff28-0389-4150-9241-18df84820da4 service nova] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Detach interface failed, port_id=cf4b3978-2fa2-4182-9422-abf29faafcf6, reason: Instance 27f4b976-7dff-49b0-9b00-7515cb976e72 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1459.393900] env[62522]: DEBUG oslo_concurrency.lockutils [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "d4f56d59-e03f-4eaf-aa2d-b77241e13be3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.394150] env[62522]: DEBUG oslo_concurrency.lockutils [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "d4f56d59-e03f-4eaf-aa2d-b77241e13be3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.394361] env[62522]: DEBUG oslo_concurrency.lockutils [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "d4f56d59-e03f-4eaf-aa2d-b77241e13be3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.394541] env[62522]: DEBUG oslo_concurrency.lockutils [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "d4f56d59-e03f-4eaf-aa2d-b77241e13be3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.394707] env[62522]: DEBUG oslo_concurrency.lockutils [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "d4f56d59-e03f-4eaf-aa2d-b77241e13be3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 
0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.396878] env[62522]: INFO nova.compute.manager [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Terminating instance [ 1459.558550] env[62522]: DEBUG oslo_vmware.api [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Task: {'id': task-2416625, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127632} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.558747] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1459.558939] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1459.559158] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1459.559342] env[62522]: INFO nova.compute.manager [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1459.559603] env[62522]: DEBUG oslo.service.loopingcall [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1459.559776] env[62522]: DEBUG nova.compute.manager [-] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1459.559871] env[62522]: DEBUG nova.network.neutron [-] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1459.580137] env[62522]: INFO nova.compute.manager [-] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Took 1.49 seconds to deallocate network for instance. [ 1459.900541] env[62522]: DEBUG nova.compute.manager [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1459.900916] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1459.901624] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c80bbc0b-65f7-42f5-b458-1d31f30ef066 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.909069] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1459.909305] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-019dc385-b195-4158-b6d6-a57681159731 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.915124] env[62522]: DEBUG oslo_vmware.api [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1459.915124] env[62522]: value = "task-2416626" [ 1459.915124] env[62522]: _type = "Task" [ 1459.915124] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.922728] env[62522]: DEBUG oslo_vmware.api [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416626, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.087637] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1460.087942] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1460.088178] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.117041] env[62522]: INFO nova.scheduler.client.report [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Deleted allocations for instance 27f4b976-7dff-49b0-9b00-7515cb976e72 [ 1460.317661] env[62522]: DEBUG nova.network.neutron [-] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1460.424848] env[62522]: DEBUG oslo_vmware.api [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416626, 'name': PowerOffVM_Task, 'duration_secs': 0.197867} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.425116] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1460.425286] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1460.425528] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e85645d2-e744-4297-a5c6-5eb4d9df00d8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.490774] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1460.491110] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1460.491400] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Deleting the datastore file [datastore2] d4f56d59-e03f-4eaf-aa2d-b77241e13be3 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1460.491755] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f8e2b976-ea6a-470a-a41b-0453e6422ac5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.499323] env[62522]: DEBUG oslo_vmware.api [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for the task: (returnval){ [ 1460.499323] env[62522]: value = "task-2416628" [ 1460.499323] env[62522]: _type = "Task" [ 1460.499323] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.507280] env[62522]: DEBUG oslo_vmware.api [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416628, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.625335] env[62522]: DEBUG oslo_concurrency.lockutils [None req-7968b0e6-4712-436e-b3b6-e16dda22dd33 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "27f4b976-7dff-49b0-9b00-7515cb976e72" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.154s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.630560] env[62522]: DEBUG nova.compute.manager [req-191d08be-ac60-4c57-bbeb-9570130c4f4d req-7647a86b-696e-4d32-8423-641a9d00f26f service nova] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Received event network-vif-deleted-58444651-b47b-44d5-b240-53949c79df86 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1460.819814] env[62522]: INFO nova.compute.manager [-] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Took 1.26 seconds to deallocate network for instance. [ 1461.010186] env[62522]: DEBUG oslo_vmware.api [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Task: {'id': task-2416628, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135179} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.010186] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1461.010640] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1461.010640] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1461.010640] env[62522]: INFO nova.compute.manager [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1461.010944] env[62522]: DEBUG oslo.service.loopingcall [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1461.011673] env[62522]: DEBUG nova.compute.manager [-] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1461.011673] env[62522]: DEBUG nova.network.neutron [-] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1461.328944] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.329209] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1461.329429] env[62522]: DEBUG nova.objects.instance [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lazy-loading 'resources' on Instance uuid b31195c2-29f4-475c-baa7-fcb4791b7278 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1461.487788] env[62522]: DEBUG oslo_concurrency.lockutils [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "8c9f01e4-354d-4746-a3ac-f0895ba857ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.488112] env[62522]: DEBUG oslo_concurrency.lockutils [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "8c9f01e4-354d-4746-a3ac-f0895ba857ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1461.891884] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aafdc48-24cf-464b-b48b-c5f15a314a33 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.902464] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df1089f3-9d30-4ec8-b0db-eae019edae4a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.942515] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e443614-d41c-40b8-afb7-2b3993d974b8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.950511] env[62522]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa20b596-af52-4db5-a78b-432fa5b14a96 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.969376] env[62522]: DEBUG nova.compute.provider_tree [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1461.983044] env[62522]: DEBUG nova.network.neutron [-] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.990768] env[62522]: DEBUG nova.compute.manager [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1462.472531] env[62522]: DEBUG nova.scheduler.client.report [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1462.485776] env[62522]: INFO nova.compute.manager [-] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Took 1.47 seconds to deallocate network for instance. 
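The lock-tracing lines that recur throughout this trace ("Acquiring lock ... by ...", "Lock ... acquired ... :: waited", "Lock ... released ... :: held") are emitted by oslo.concurrency's synchronized wrapper around critical sections such as the resource tracker's "compute_resources" lock. A minimal, hypothetical sketch of that pattern for standalone use of the library follows; the lock name, worker function, and logging setup are illustrative assumptions, not Nova's own code.

import logging
import time

from oslo_concurrency import lockutils

# Surface the DEBUG messages lockutils logs around the critical section,
# i.e. the "Acquiring lock" / "acquired" / "released" lines seen in this trace.
logging.basicConfig(level=logging.DEBUG)


@lockutils.synchronized('compute_resources')
def update_usage():
    # Only one caller per process holds the 'compute_resources' semaphore at a
    # time; concurrent callers block here, and the "waited"/"held" durations in
    # the trace measure that contention and the time spent inside the section.
    time.sleep(0.1)


if __name__ == '__main__':
    update_usage()

Invoked concurrently from two threads, the second call would report a non-zero "waited" time, which is what the 0.472s and 2.108s waits on "compute_resources" later in this trace reflect.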
[ 1462.508317] env[62522]: DEBUG oslo_concurrency.lockutils [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.655641] env[62522]: DEBUG nova.compute.manager [req-da475a6e-4f88-4d65-bb79-15ef6ca28f9c req-1274a26f-f7a2-425b-90eb-ff541a3941e6 service nova] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Received event network-vif-deleted-2e6d7577-6482-4d9e-8729-5b99aa379e44 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1462.978232] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.649s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1462.980885] env[62522]: DEBUG oslo_concurrency.lockutils [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.472s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1462.983027] env[62522]: INFO nova.compute.claims [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1462.991814] env[62522]: DEBUG oslo_concurrency.lockutils [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.996317] env[62522]: INFO nova.scheduler.client.report [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Deleted allocations for instance b31195c2-29f4-475c-baa7-fcb4791b7278 [ 1463.502874] env[62522]: DEBUG oslo_concurrency.lockutils [None req-df20be6f-c01b-468f-b38b-27a48b8b8160 tempest-ServerActionsTestOtherA-43616242 tempest-ServerActionsTestOtherA-43616242-project-member] Lock "b31195c2-29f4-475c-baa7-fcb4791b7278" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.567s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1464.028749] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44fa4c53-b157-4f44-a943-8c0a7979b7d3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.036373] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a6f720f-4c06-4d95-a7db-2c936b2854c7 {{(pid=62522) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.065598] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77fa4bcb-29e4-4d2c-94ac-de82273f9c99 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.073067] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c43b62f-2497-439c-983b-a89e53babca2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.086620] env[62522]: DEBUG nova.compute.provider_tree [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1464.589655] env[62522]: DEBUG nova.scheduler.client.report [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1465.096575] env[62522]: DEBUG oslo_concurrency.lockutils [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.116s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1465.097063] env[62522]: DEBUG nova.compute.manager [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Start building networks asynchronously for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1465.100154] env[62522]: DEBUG oslo_concurrency.lockutils [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.108s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1465.104063] env[62522]: DEBUG nova.objects.instance [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lazy-loading 'resources' on Instance uuid d4f56d59-e03f-4eaf-aa2d-b77241e13be3 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1465.606021] env[62522]: DEBUG nova.compute.utils [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1465.609498] env[62522]: DEBUG nova.compute.manager [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1465.609867] env[62522]: DEBUG nova.network.neutron [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1465.657025] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6299f615-1bbc-4640-bbda-247ea2ade408 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.663000] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-709a1804-ea16-435c-b772-dbd94aa0e614 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.695965] env[62522]: DEBUG nova.policy [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '607183068c444260afbec94a63fde1d4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bab9d5d3c27d4c218b88e4a029300a66', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1465.697743] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d81983a0-666a-465c-8797-2d1da730486d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.704970] env[62522]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59792a1-36ce-48f4-9d8d-6c70f01bfab2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.717758] env[62522]: DEBUG nova.compute.provider_tree [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1466.025011] env[62522]: DEBUG nova.network.neutron [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Successfully created port: 71b25a9a-9768-4740-adf2-4b118bf2e559 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1466.109993] env[62522]: DEBUG nova.compute.manager [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1466.222494] env[62522]: DEBUG nova.scheduler.client.report [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1466.247280] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1466.728380] env[62522]: DEBUG oslo_concurrency.lockutils [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.627s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.748205] env[62522]: INFO nova.scheduler.client.report [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Deleted allocations for instance d4f56d59-e03f-4eaf-aa2d-b77241e13be3 [ 1467.120926] env[62522]: DEBUG nova.compute.manager [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Start spawning the instance on the hypervisor. 
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1467.147803] env[62522]: DEBUG nova.virt.hardware [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1467.148098] env[62522]: DEBUG nova.virt.hardware [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1467.148397] env[62522]: DEBUG nova.virt.hardware [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1467.148608] env[62522]: DEBUG nova.virt.hardware [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1467.148760] env[62522]: DEBUG nova.virt.hardware [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1467.148911] env[62522]: DEBUG nova.virt.hardware [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1467.149757] env[62522]: DEBUG nova.virt.hardware [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1467.149968] env[62522]: DEBUG nova.virt.hardware [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1467.150174] env[62522]: DEBUG nova.virt.hardware [None 
req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1467.150606] env[62522]: DEBUG nova.virt.hardware [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1467.150818] env[62522]: DEBUG nova.virt.hardware [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1467.151690] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5ed5a15-e484-47e8-b54f-18d84328c1a2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.160188] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d40854b-8262-405d-b707-144b41c1f062 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.255208] env[62522]: DEBUG oslo_concurrency.lockutils [None req-278dd6d6-0266-4591-8e89-b90310e4aaf2 tempest-AttachVolumeNegativeTest-1064667517 tempest-AttachVolumeNegativeTest-1064667517-project-member] Lock "d4f56d59-e03f-4eaf-aa2d-b77241e13be3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.861s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1467.499381] env[62522]: DEBUG nova.compute.manager [req-df4b5536-6f41-4024-9e3b-9683d99fcaf4 req-43b04a42-51cd-4ea8-84f8-3eadae8643c3 service nova] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Received event network-vif-plugged-71b25a9a-9768-4740-adf2-4b118bf2e559 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1467.499601] env[62522]: DEBUG oslo_concurrency.lockutils [req-df4b5536-6f41-4024-9e3b-9683d99fcaf4 req-43b04a42-51cd-4ea8-84f8-3eadae8643c3 service nova] Acquiring lock "8c9f01e4-354d-4746-a3ac-f0895ba857ca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1467.499819] env[62522]: DEBUG oslo_concurrency.lockutils [req-df4b5536-6f41-4024-9e3b-9683d99fcaf4 req-43b04a42-51cd-4ea8-84f8-3eadae8643c3 service nova] Lock "8c9f01e4-354d-4746-a3ac-f0895ba857ca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1467.499981] env[62522]: DEBUG oslo_concurrency.lockutils [req-df4b5536-6f41-4024-9e3b-9683d99fcaf4 req-43b04a42-51cd-4ea8-84f8-3eadae8643c3 service nova] Lock "8c9f01e4-354d-4746-a3ac-f0895ba857ca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1467.500483] env[62522]: DEBUG nova.compute.manager [req-df4b5536-6f41-4024-9e3b-9683d99fcaf4 req-43b04a42-51cd-4ea8-84f8-3eadae8643c3 service nova] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] No waiting events found dispatching network-vif-plugged-71b25a9a-9768-4740-adf2-4b118bf2e559 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1467.500664] env[62522]: WARNING nova.compute.manager [req-df4b5536-6f41-4024-9e3b-9683d99fcaf4 req-43b04a42-51cd-4ea8-84f8-3eadae8643c3 service nova] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Received unexpected event network-vif-plugged-71b25a9a-9768-4740-adf2-4b118bf2e559 for instance with vm_state building and task_state spawning. [ 1467.585490] env[62522]: DEBUG nova.network.neutron [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Successfully updated port: 71b25a9a-9768-4740-adf2-4b118bf2e559 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1468.092253] env[62522]: DEBUG oslo_concurrency.lockutils [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "refresh_cache-8c9f01e4-354d-4746-a3ac-f0895ba857ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1468.092535] env[62522]: DEBUG oslo_concurrency.lockutils [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired lock "refresh_cache-8c9f01e4-354d-4746-a3ac-f0895ba857ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1468.092574] env[62522]: DEBUG nova.network.neutron [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1468.629050] env[62522]: DEBUG nova.network.neutron [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Instance cache missing network info. 
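The nova.virt.hardware entries above walk through CPU topology selection for the m1.nano flavor: with 1 vCPU and no flavor or image limits, the only candidate is sockets=1, cores=1, threads=1 ("Got 1 possible topologies"). The sketch below reproduces that enumeration idea in plain Python; it is not Nova's implementation, just the same divisor search under the 65536 default maxima seen in the log.

```python
# Illustrative sketch: enumerate every (sockets, cores, threads) combination
# whose product equals the flavor's vCPU count while respecting the
# per-dimension maxima logged above.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))   # [(1, 1, 1)] -- matches "Got 1 possible topologies"
print(possible_topologies(4))   # [(1, 1, 4), (1, 2, 2), (1, 4, 1), (2, 1, 2), (2, 2, 1), (4, 1, 1)]
```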
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1468.786576] env[62522]: DEBUG nova.network.neutron [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Updating instance_info_cache with network_info: [{"id": "71b25a9a-9768-4740-adf2-4b118bf2e559", "address": "fa:16:3e:22:69:0a", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71b25a9a-97", "ovs_interfaceid": "71b25a9a-9768-4740-adf2-4b118bf2e559", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1469.130660] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Acquiring lock "66a86c13-cb71-4cc1-95e2-23ba29133753" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1469.131029] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Lock "66a86c13-cb71-4cc1-95e2-23ba29133753" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1469.289547] env[62522]: DEBUG oslo_concurrency.lockutils [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Releasing lock "refresh_cache-8c9f01e4-354d-4746-a3ac-f0895ba857ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1469.290112] env[62522]: DEBUG nova.compute.manager [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Instance network_info: |[{"id": "71b25a9a-9768-4740-adf2-4b118bf2e559", "address": "fa:16:3e:22:69:0a", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], 
"gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71b25a9a-97", "ovs_interfaceid": "71b25a9a-9768-4740-adf2-4b118bf2e559", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1469.290359] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:69:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f625f389-b7cf-49b9-998a-87f3a9e3f234', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '71b25a9a-9768-4740-adf2-4b118bf2e559', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1469.298746] env[62522]: DEBUG oslo.service.loopingcall [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1469.298957] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1469.299184] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3e800f4e-1117-4b99-8cfb-5d42c5b0216d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.319599] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1469.319599] env[62522]: value = "task-2416630" [ 1469.319599] env[62522]: _type = "Task" [ 1469.319599] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.327049] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416630, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.524990] env[62522]: DEBUG nova.compute.manager [req-f14dbc33-88d6-48a9-8e6a-a1e307ff9dc3 req-2ea6665f-44b4-4f8d-a0f0-41e43ad04b7a service nova] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Received event network-changed-71b25a9a-9768-4740-adf2-4b118bf2e559 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1469.525211] env[62522]: DEBUG nova.compute.manager [req-f14dbc33-88d6-48a9-8e6a-a1e307ff9dc3 req-2ea6665f-44b4-4f8d-a0f0-41e43ad04b7a service nova] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Refreshing instance network info cache due to event network-changed-71b25a9a-9768-4740-adf2-4b118bf2e559. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1469.525519] env[62522]: DEBUG oslo_concurrency.lockutils [req-f14dbc33-88d6-48a9-8e6a-a1e307ff9dc3 req-2ea6665f-44b4-4f8d-a0f0-41e43ad04b7a service nova] Acquiring lock "refresh_cache-8c9f01e4-354d-4746-a3ac-f0895ba857ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1469.526313] env[62522]: DEBUG oslo_concurrency.lockutils [req-f14dbc33-88d6-48a9-8e6a-a1e307ff9dc3 req-2ea6665f-44b4-4f8d-a0f0-41e43ad04b7a service nova] Acquired lock "refresh_cache-8c9f01e4-354d-4746-a3ac-f0895ba857ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.526647] env[62522]: DEBUG nova.network.neutron [req-f14dbc33-88d6-48a9-8e6a-a1e307ff9dc3 req-2ea6665f-44b4-4f8d-a0f0-41e43ad04b7a service nova] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Refreshing network info cache for port 71b25a9a-9768-4740-adf2-4b118bf2e559 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1469.633773] env[62522]: DEBUG nova.compute.manager [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Starting instance... {{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1469.835019] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416630, 'name': CreateVM_Task, 'duration_secs': 0.340729} completed successfully. 
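The CreateVM_Task entries above show the usual task lifecycle: the SOAP call returns a task handle, the driver polls it ("progress is 0%.") and eventually records "completed successfully" with a duration. A generic version of that poll loop is sketched below; the fetch_task_state callable and the state names are placeholders standing in for the vSphere API, not the real oslo.vmware interface.

```python
# Generic poll-until-done loop in the spirit of the wait_for_task /
# _poll_task entries above (placeholder task source, not the vSphere API).
import time

def wait_for_task(task_id, fetch_task_state, interval=0.5, timeout=300):
    """Poll a task until it succeeds, raising on error or timeout."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress, error = fetch_task_state(task_id)
        if state == 'success':
            return
        if state == 'error':
            raise RuntimeError(f'task {task_id} failed: {error}')
        # Still queued or running: report progress and try again.
        print(f'Task {task_id} progress is {progress}%.')
        time.sleep(interval)
    raise TimeoutError(f'task {task_id} did not complete within {timeout}s')

# Toy usage with a fake task source that finishes on the second poll.
states = iter([('running', 0, None), ('success', 100, None)])
wait_for_task('task-2416630', lambda _tid: next(states), interval=0)
```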
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.835019] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1469.835019] env[62522]: DEBUG oslo_concurrency.lockutils [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1469.835019] env[62522]: DEBUG oslo_concurrency.lockutils [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.835019] env[62522]: DEBUG oslo_concurrency.lockutils [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1469.835019] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29a5a896-be11-4fe1-b0c3-31d9f594b141 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.837676] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1469.837676] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526410df-7fe7-5519-6862-898cfa066d1a" [ 1469.837676] env[62522]: _type = "Task" [ 1469.837676] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.845685] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526410df-7fe7-5519-6862-898cfa066d1a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.157466] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.157749] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.159473] env[62522]: INFO nova.compute.claims [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1470.237183] env[62522]: DEBUG nova.network.neutron [req-f14dbc33-88d6-48a9-8e6a-a1e307ff9dc3 req-2ea6665f-44b4-4f8d-a0f0-41e43ad04b7a service nova] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Updated VIF entry in instance network info cache for port 71b25a9a-9768-4740-adf2-4b118bf2e559. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1470.237715] env[62522]: DEBUG nova.network.neutron [req-f14dbc33-88d6-48a9-8e6a-a1e307ff9dc3 req-2ea6665f-44b4-4f8d-a0f0-41e43ad04b7a service nova] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Updating instance_info_cache with network_info: [{"id": "71b25a9a-9768-4740-adf2-4b118bf2e559", "address": "fa:16:3e:22:69:0a", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71b25a9a-97", "ovs_interfaceid": "71b25a9a-9768-4740-adf2-4b118bf2e559", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1470.348318] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526410df-7fe7-5519-6862-898cfa066d1a, 'name': SearchDatastore_Task, 'duration_secs': 
0.0104} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.348632] env[62522]: DEBUG oslo_concurrency.lockutils [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1470.348870] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1470.349113] env[62522]: DEBUG oslo_concurrency.lockutils [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1470.349300] env[62522]: DEBUG oslo_concurrency.lockutils [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1470.349525] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1470.349790] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c85d3dd4-391b-4fec-9f46-ddcf4b65a05c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.357469] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1470.357643] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Folder [datastore2] devstack-image-cache_base created. 
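The locks and datastore-browser searches above belong to the image-cache flow: the base image 2ee4561b-ba48-4f45-82f6-eac89be98290 lives under [datastore2] devstack-image-cache_base/&lt;image id&gt;/&lt;image id&gt;.vmdk and is copied to [datastore2] &lt;instance uuid&gt;/&lt;instance uuid&gt;.vmdk a few entries later. A minimal sketch of those paths and the copy-if-cached decision follows; the path layout is taken from the log, while the helper names and the exists/copy_disk callables are illustrative placeholders.

```python
# Sketch of the datastore paths involved in the cache flow above.
def cached_image_path(datastore, image_id, cache_dir='devstack-image-cache_base'):
    return f'[{datastore}] {cache_dir}/{image_id}/{image_id}.vmdk'

def instance_disk_path(datastore, instance_uuid):
    return f'[{datastore}] {instance_uuid}/{instance_uuid}.vmdk'

def ensure_root_disk(datastore, image_id, instance_uuid, exists, copy_disk):
    """Copy the cached base image into the instance folder; the download
    into the cache on a miss is elided here."""
    src = cached_image_path(datastore, image_id)
    dst = instance_disk_path(datastore, instance_uuid)
    if not exists(src):
        raise NotImplementedError('image would be fetched into the cache here')
    copy_disk(src, dst)
    return dst

print(ensure_root_disk(
    'datastore2',
    '2ee4561b-ba48-4f45-82f6-eac89be98290',
    '8c9f01e4-354d-4746-a3ac-f0895ba857ca',
    exists=lambda path: True,
    copy_disk=lambda src, dst: None,
))
# [datastore2] 8c9f01e4-354d-4746-a3ac-f0895ba857ca/8c9f01e4-354d-4746-a3ac-f0895ba857ca.vmdk
```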
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1470.358386] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2a2799a-bf8d-4561-85bd-20495a222a7e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.363279] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1470.363279] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a6e25f-7b03-d915-2236-e74a02812bbe" [ 1470.363279] env[62522]: _type = "Task" [ 1470.363279] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.370546] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a6e25f-7b03-d915-2236-e74a02812bbe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.740656] env[62522]: DEBUG oslo_concurrency.lockutils [req-f14dbc33-88d6-48a9-8e6a-a1e307ff9dc3 req-2ea6665f-44b4-4f8d-a0f0-41e43ad04b7a service nova] Releasing lock "refresh_cache-8c9f01e4-354d-4746-a3ac-f0895ba857ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1470.873774] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52a6e25f-7b03-d915-2236-e74a02812bbe, 'name': SearchDatastore_Task, 'duration_secs': 0.008988} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.874537] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50deb6d4-b196-4eab-8be5-56e06d416079 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.879728] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1470.879728] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520b3c53-5105-d1ea-333d-0141a4164435" [ 1470.879728] env[62522]: _type = "Task" [ 1470.879728] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.887868] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520b3c53-5105-d1ea-333d-0141a4164435, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.206583] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1b03ad-8daf-49b8-b76f-7f5b143e1ec8 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.214134] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-977c2493-904d-4752-8f2b-198c31648399 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.243715] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004f86be-71eb-48c2-af59-92aeab897e17 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.250578] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4baea83f-f256-46b0-be5d-8ffde1e374da {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.263192] env[62522]: DEBUG nova.compute.provider_tree [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1471.389731] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]520b3c53-5105-d1ea-333d-0141a4164435, 'name': SearchDatastore_Task, 'duration_secs': 0.009438} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.389986] env[62522]: DEBUG oslo_concurrency.lockutils [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1471.390255] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 8c9f01e4-354d-4746-a3ac-f0895ba857ca/8c9f01e4-354d-4746-a3ac-f0895ba857ca.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1471.390501] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5fa94b97-2c77-445c-afe9-a9f5a1bfc3bb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.396816] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1471.396816] env[62522]: value = "task-2416632" [ 1471.396816] env[62522]: _type = "Task" [ 1471.396816] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.404037] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416632, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.766941] env[62522]: DEBUG nova.scheduler.client.report [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1471.906657] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416632, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.439539} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.906928] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 8c9f01e4-354d-4746-a3ac-f0895ba857ca/8c9f01e4-354d-4746-a3ac-f0895ba857ca.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1471.907162] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1471.907417] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-15035568-2cc4-4963-8b46-51d20d94c16f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.915459] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1471.915459] env[62522]: value = "task-2416633" [ 1471.915459] env[62522]: _type = "Task" [ 1471.915459] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.922673] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416633, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.272522] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.115s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1472.273079] env[62522]: DEBUG nova.compute.manager [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1472.425497] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416633, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071127} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.425781] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1472.426575] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6fa7a2b-b70a-4a25-b94d-a31c162e6815 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.448034] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] 8c9f01e4-354d-4746-a3ac-f0895ba857ca/8c9f01e4-354d-4746-a3ac-f0895ba857ca.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1472.448267] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bfdae3b4-524b-4169-af40-77a4dde13c74 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.466801] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1472.466801] env[62522]: value = "task-2416634" [ 1472.466801] env[62522]: _type = "Task" [ 1472.466801] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.473741] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416634, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.749426] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1472.778273] env[62522]: DEBUG nova.compute.utils [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1472.779775] env[62522]: DEBUG nova.compute.manager [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Allocating IP information in the background. 
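The "Extending root virtual disk to 1048576" step above matches the flavor's root_gb=1 expressed in KB (1 GiB = 1024 * 1024 KB); the unit is inferred from the logged figure rather than stated in the log. A one-line version of the same conversion:

```python
# Convert the flavor's root_gb to the KB figure seen in the extend step.
def root_gb_to_kb(root_gb: int) -> int:
    return root_gb * 1024 * 1024

assert root_gb_to_kb(1) == 1048576   # the value logged for m1.nano's root disk
```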
{{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1472.779996] env[62522]: DEBUG nova.network.neutron [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1472.832254] env[62522]: DEBUG nova.policy [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '65d147ff42d14da297055498e7786ae5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '49ab8851e8f94ea4823c977261adf45b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1472.976258] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416634, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.118817] env[62522]: DEBUG nova.network.neutron [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Successfully created port: 8ff00789-40b8-4d7c-9206-9fa9c9a3661f {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1473.246889] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1473.284956] env[62522]: DEBUG nova.compute.manager [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Start building block device mappings for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1473.476188] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416634, 'name': ReconfigVM_Task, 'duration_secs': 0.513775} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.476531] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Reconfigured VM instance instance-00000078 to attach disk [datastore2] 8c9f01e4-354d-4746-a3ac-f0895ba857ca/8c9f01e4-354d-4746-a3ac-f0895ba857ca.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1473.477198] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d8c5b087-7205-42e7-bb44-6f74f3ad201e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.483168] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1473.483168] env[62522]: value = "task-2416636" [ 1473.483168] env[62522]: _type = "Task" [ 1473.483168] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.490485] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416636, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.749976] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.750259] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.750517] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.750719] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62522) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1473.751642] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ad3dd3e-3cb7-42f3-961d-686dafbc3750 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.759571] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca32cbd-7f4d-480a-b359-7489144eda12 {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.772917] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ab3b3be-dff9-4198-8920-c1b549972efa {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.778941] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c023196-6ecb-4317-a21a-da31e28bf6df {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.811389] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180397MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=62522) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1473.811537] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.811709] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.993517] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416636, 'name': Rename_Task} progress is 99%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.315051] env[62522]: DEBUG nova.compute.manager [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Start spawning the instance on the hypervisor. 
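The resource audit that starts above feeds the "Final resource view" reported a few entries later: used_ram=896MB is the 512 MB host reservation plus the two m1.nano claims of 192 MB each, and used_vcpus=2 is one vCPU per building instance. A back-of-the-envelope version of that accounting is sketched below, under the assumption that the tracker simply sums per-instance claims on top of the reserved amounts (the real tracker also handles PCI devices, migrations and more).

```python
# Rough accounting that reproduces the "Final resource view" figures.
reserved_host_memory_mb = 512          # from the MEMORY_MB inventory record
claims = [
    {'uuid': '8c9f01e4-354d-4746-a3ac-f0895ba857ca', 'memory_mb': 192, 'vcpus': 1, 'root_gb': 1},
    {'uuid': '66a86c13-cb71-4cc1-95e2-23ba29133753', 'memory_mb': 192, 'vcpus': 1, 'root_gb': 1},
]

used_ram_mb = reserved_host_memory_mb + sum(c['memory_mb'] for c in claims)
used_vcpus = sum(c['vcpus'] for c in claims)
used_disk_gb = sum(c['root_gb'] for c in claims)

print(used_ram_mb, used_vcpus, used_disk_gb)   # 896 2 2 -- as in the final resource view
```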
{{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1474.342865] env[62522]: DEBUG nova.virt.hardware [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1474.343120] env[62522]: DEBUG nova.virt.hardware [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1474.343291] env[62522]: DEBUG nova.virt.hardware [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1474.343477] env[62522]: DEBUG nova.virt.hardware [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1474.343625] env[62522]: DEBUG nova.virt.hardware [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1474.343772] env[62522]: DEBUG nova.virt.hardware [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1474.343981] env[62522]: DEBUG nova.virt.hardware [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1474.344155] env[62522]: DEBUG nova.virt.hardware [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1474.344323] env[62522]: DEBUG nova.virt.hardware [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1474.344485] env[62522]: DEBUG nova.virt.hardware [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1474.344658] env[62522]: DEBUG nova.virt.hardware [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1474.345525] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-780311e9-e22a-4ef3-8f53-8ac700d64cae {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.353966] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-083ffcbb-c818-4f5c-8091-b9d945cde511 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.494506] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416636, 'name': Rename_Task} progress is 99%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.501674] env[62522]: DEBUG nova.compute.manager [req-c03d268a-49c6-46e2-856e-a8f3805c1388 req-ddc28f54-2aef-402e-9f88-6d0a5ccaf5bd service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Received event network-vif-plugged-8ff00789-40b8-4d7c-9206-9fa9c9a3661f {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1474.501883] env[62522]: DEBUG oslo_concurrency.lockutils [req-c03d268a-49c6-46e2-856e-a8f3805c1388 req-ddc28f54-2aef-402e-9f88-6d0a5ccaf5bd service nova] Acquiring lock "66a86c13-cb71-4cc1-95e2-23ba29133753-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1474.502136] env[62522]: DEBUG oslo_concurrency.lockutils [req-c03d268a-49c6-46e2-856e-a8f3805c1388 req-ddc28f54-2aef-402e-9f88-6d0a5ccaf5bd service nova] Lock "66a86c13-cb71-4cc1-95e2-23ba29133753-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.502309] env[62522]: DEBUG oslo_concurrency.lockutils [req-c03d268a-49c6-46e2-856e-a8f3805c1388 req-ddc28f54-2aef-402e-9f88-6d0a5ccaf5bd service nova] Lock "66a86c13-cb71-4cc1-95e2-23ba29133753-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.502484] env[62522]: DEBUG nova.compute.manager [req-c03d268a-49c6-46e2-856e-a8f3805c1388 req-ddc28f54-2aef-402e-9f88-6d0a5ccaf5bd service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] No waiting events found dispatching network-vif-plugged-8ff00789-40b8-4d7c-9206-9fa9c9a3661f {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1474.502610] env[62522]: WARNING nova.compute.manager [req-c03d268a-49c6-46e2-856e-a8f3805c1388 req-ddc28f54-2aef-402e-9f88-6d0a5ccaf5bd service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Received unexpected event network-vif-plugged-8ff00789-40b8-4d7c-9206-9fa9c9a3661f for instance with vm_state building and task_state spawning. [ 1474.585665] env[62522]: DEBUG nova.network.neutron [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Successfully updated port: 8ff00789-40b8-4d7c-9206-9fa9c9a3661f {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1474.839414] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 8c9f01e4-354d-4746-a3ac-f0895ba857ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1474.839585] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 66a86c13-cb71-4cc1-95e2-23ba29133753 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1474.839758] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1474.839898] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1474.876250] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34222b76-e958-41f3-bacd-3894d32e8ec0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.883590] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08acd4aa-eaa6-427f-aa35-a2b3f0769f57 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.912905] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91825ab6-a0f2-43f4-9c79-62343a3d4ff0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.919501] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bf7057f-a7f4-4960-adbd-9f91760a348d {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.932033] env[62522]: DEBUG nova.compute.provider_tree [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1474.993325] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416636, 'name': Rename_Task, 'duration_secs': 1.175721} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.993582] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1474.993813] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-887d60dd-d6f1-43ff-b286-9113f48db651 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.999662] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1474.999662] env[62522]: value = "task-2416637" [ 1474.999662] env[62522]: _type = "Task" [ 1474.999662] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.006883] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416637, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.088644] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Acquiring lock "refresh_cache-66a86c13-cb71-4cc1-95e2-23ba29133753" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1475.088828] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Acquired lock "refresh_cache-66a86c13-cb71-4cc1-95e2-23ba29133753" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1475.088980] env[62522]: DEBUG nova.network.neutron [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1475.435596] env[62522]: DEBUG nova.scheduler.client.report [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1475.510769] env[62522]: DEBUG oslo_vmware.api [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416637, 'name': PowerOnVM_Task, 'duration_secs': 0.467286} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.511156] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1475.511370] env[62522]: INFO nova.compute.manager [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Took 8.39 seconds to spawn the instance on the hypervisor. 
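The resource-tracker figures in the entries above are internally consistent: with the 512 MB reserved in the MEMORY_MB inventory record and the two m1.nano instances (192 MB RAM, 1 GB root disk, 1 vCPU each) still holding placement allocations, the reported used_ram=896MB, used_disk=2GB and used_vcpus=2 follow directly. A quick sanity check, using only numbers taken from the log (not part of the trace itself):

# Figures copied from the log: two m1.nano instances and the
# MEMORY_MB 'reserved' value from the inventory data above.
instances = [
    {"memory_mb": 192, "root_gb": 1, "vcpus": 1},  # 8c9f01e4-...
    {"memory_mb": 192, "root_gb": 1, "vcpus": 1},  # 66a86c13-...
]
reserved_memory_mb = 512

used_ram_mb = reserved_memory_mb + sum(i["memory_mb"] for i in instances)
used_disk_gb = sum(i["root_gb"] for i in instances)
used_vcpus = sum(i["vcpus"] for i in instances)

assert used_ram_mb == 896   # matches "used_ram=896MB"
assert used_disk_gb == 2    # matches "used_disk=2GB"
assert used_vcpus == 2      # matches "used_vcpus=2"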
[ 1475.511549] env[62522]: DEBUG nova.compute.manager [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1475.512329] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db4626d-863c-4bc4-b603-84151df7b1da {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.626763] env[62522]: DEBUG nova.network.neutron [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Instance cache missing network info. {{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1475.743418] env[62522]: DEBUG nova.network.neutron [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Updating instance_info_cache with network_info: [{"id": "8ff00789-40b8-4d7c-9206-9fa9c9a3661f", "address": "fa:16:3e:97:14:de", "network": {"id": "d7eaa938-9f8a-488e-9158-32e272d26c16", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-2361194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "49ab8851e8f94ea4823c977261adf45b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ff00789-40", "ovs_interfaceid": "8ff00789-40b8-4d7c-9206-9fa9c9a3661f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1475.940564] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62522) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1475.940787] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.129s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.027580] env[62522]: INFO nova.compute.manager [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Took 13.53 seconds to build instance. 
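The Rename_Task and PowerOnVM_Task entries above all follow the same oslo.vmware pattern: invoke a vCenter *_Task method through the API session, then block in wait_for_task(), whose polling produces the "_poll_task ... progress is N%" and "completed successfully" lines seen here. A minimal sketch of the calling side, assuming an already-established oslo_vmware.api.VMwareAPISession; session and vm_ref below are placeholders, not values from this log:

def power_on(session, vm_ref):
    """Illustrative only: session is an existing VMwareAPISession,
    vm_ref a managed-object reference to the VM."""
    # Starts the asynchronous vCenter task ("Invoking VirtualMachine.PowerOnVM_Task").
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the task state (the "progress is N%" lines)
    # and returns its info once it reaches 'success'; a task that ends in
    # 'error' is surfaced as an oslo_vmware exception.
    return session.wait_for_task(task)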
[ 1476.246018] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Releasing lock "refresh_cache-66a86c13-cb71-4cc1-95e2-23ba29133753" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1476.246479] env[62522]: DEBUG nova.compute.manager [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Instance network_info: |[{"id": "8ff00789-40b8-4d7c-9206-9fa9c9a3661f", "address": "fa:16:3e:97:14:de", "network": {"id": "d7eaa938-9f8a-488e-9158-32e272d26c16", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-2361194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "49ab8851e8f94ea4823c977261adf45b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ff00789-40", "ovs_interfaceid": "8ff00789-40b8-4d7c-9206-9fa9c9a3661f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1476.247064] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:14:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92f3cfd6-c130-4390-8910-865fbc42afd1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8ff00789-40b8-4d7c-9206-9fa9c9a3661f', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1476.258725] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Creating folder: Project (49ab8851e8f94ea4823c977261adf45b). Parent ref: group-v489562. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1476.259087] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7a8830d2-dbfd-4105-92a7-34e48eae14a0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.271652] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Created folder: Project (49ab8851e8f94ea4823c977261adf45b) in parent group-v489562. 
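The "Acquiring lock ... / acquired ... waited / released ... held" entries throughout this trace come from oslo.concurrency's lockutils (the inner wrapper at lockutils.py:402/407/421). A minimal sketch of that pattern as a caller would use it, with a placeholder lock name in the same refresh_cache-<uuid> style as the entries above:

from oslo_concurrency import lockutils

def refresh_instance_cache(instance_uuid):
    # Entering and leaving the context manager emits the DEBUG
    # acquire/release lines with the waited/held timings seen in the log.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        # critical section: rebuild the instance's network info cache
        pass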
[ 1476.271917] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Creating folder: Instances. Parent ref: group-v489892. {{(pid=62522) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1476.272245] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0faa92bf-e912-4287-a90a-204d5d849e75 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.283303] env[62522]: INFO nova.virt.vmwareapi.vm_util [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Created folder: Instances in parent group-v489892. [ 1476.283623] env[62522]: DEBUG oslo.service.loopingcall [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1476.283876] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1476.284161] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-02ced7a6-e916-4e83-b9a7-b6635e8a2606 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.309947] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1476.309947] env[62522]: value = "task-2416640" [ 1476.309947] env[62522]: _type = "Task" [ 1476.309947] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.318625] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416640, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.529564] env[62522]: DEBUG nova.compute.manager [req-b540bd44-8465-4900-ab36-d9ddbcdd762d req-876cd9e8-026b-4bd6-b536-5c19e91833d7 service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Received event network-changed-8ff00789-40b8-4d7c-9206-9fa9c9a3661f {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1476.530023] env[62522]: DEBUG nova.compute.manager [req-b540bd44-8465-4900-ab36-d9ddbcdd762d req-876cd9e8-026b-4bd6-b536-5c19e91833d7 service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Refreshing instance network info cache due to event network-changed-8ff00789-40b8-4d7c-9206-9fa9c9a3661f. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1476.530023] env[62522]: DEBUG oslo_concurrency.lockutils [req-b540bd44-8465-4900-ab36-d9ddbcdd762d req-876cd9e8-026b-4bd6-b536-5c19e91833d7 service nova] Acquiring lock "refresh_cache-66a86c13-cb71-4cc1-95e2-23ba29133753" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1476.530169] env[62522]: DEBUG oslo_concurrency.lockutils [req-b540bd44-8465-4900-ab36-d9ddbcdd762d req-876cd9e8-026b-4bd6-b536-5c19e91833d7 service nova] Acquired lock "refresh_cache-66a86c13-cb71-4cc1-95e2-23ba29133753" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1476.530231] env[62522]: DEBUG nova.network.neutron [req-b540bd44-8465-4900-ab36-d9ddbcdd762d req-876cd9e8-026b-4bd6-b536-5c19e91833d7 service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Refreshing network info cache for port 8ff00789-40b8-4d7c-9206-9fa9c9a3661f {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1476.532108] env[62522]: DEBUG oslo_concurrency.lockutils [None req-388571a3-cdaa-41cb-aed0-034fe0700104 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "8c9f01e4-354d-4746-a3ac-f0895ba857ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.044s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.821252] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416640, 'name': CreateVM_Task, 'duration_secs': 0.33575} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.821252] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1476.821421] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1476.821459] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1476.821762] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1476.822013] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58deea64-aa12-467e-9d15-d703d9805466 {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.826567] env[62522]: DEBUG oslo_vmware.api [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Waiting for the task: (returnval){ [ 1476.826567] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5289eb28-fef8-09cc-c951-6bc1ff26aaf4" [ 1476.826567] env[62522]: _type = "Task" [ 1476.826567] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.834216] env[62522]: DEBUG oslo_vmware.api [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5289eb28-fef8-09cc-c951-6bc1ff26aaf4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.936040] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1476.936499] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1476.936731] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Starting heal instance info cache {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1477.237302] env[62522]: DEBUG nova.network.neutron [req-b540bd44-8465-4900-ab36-d9ddbcdd762d req-876cd9e8-026b-4bd6-b536-5c19e91833d7 service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Updated VIF entry in instance network info cache for port 8ff00789-40b8-4d7c-9206-9fa9c9a3661f. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1477.237302] env[62522]: DEBUG nova.network.neutron [req-b540bd44-8465-4900-ab36-d9ddbcdd762d req-876cd9e8-026b-4bd6-b536-5c19e91833d7 service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Updating instance_info_cache with network_info: [{"id": "8ff00789-40b8-4d7c-9206-9fa9c9a3661f", "address": "fa:16:3e:97:14:de", "network": {"id": "d7eaa938-9f8a-488e-9158-32e272d26c16", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-2361194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "49ab8851e8f94ea4823c977261adf45b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ff00789-40", "ovs_interfaceid": "8ff00789-40b8-4d7c-9206-9fa9c9a3661f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1477.336984] env[62522]: DEBUG oslo_vmware.api [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]5289eb28-fef8-09cc-c951-6bc1ff26aaf4, 'name': SearchDatastore_Task, 'duration_secs': 0.018275} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.337393] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1477.337524] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1477.337734] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1477.337878] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.338072] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1477.338383] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-03678ab8-f07f-45ab-9f69-b78f24fc6fce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.350444] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1477.350635] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1477.351390] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3882756-5fa3-4a94-94ea-1564de83d6ce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.361063] env[62522]: DEBUG oslo_vmware.api [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Waiting for the task: (returnval){ [ 1477.361063] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521d4fed-43bf-0d79-1a54-488584eb91d6" [ 1477.361063] env[62522]: _type = "Task" [ 1477.361063] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.371762] env[62522]: DEBUG oslo_vmware.api [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521d4fed-43bf-0d79-1a54-488584eb91d6, 'name': SearchDatastore_Task, 'duration_secs': 0.008378} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.372578] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66552282-c988-4427-9ea3-e861e83dfe6f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.377926] env[62522]: DEBUG oslo_vmware.api [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Waiting for the task: (returnval){ [ 1477.377926] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f5d8ec-89c8-5784-ee71-45bec282c9ed" [ 1477.377926] env[62522]: _type = "Task" [ 1477.377926] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.392868] env[62522]: DEBUG oslo_vmware.api [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52f5d8ec-89c8-5784-ee71-45bec282c9ed, 'name': SearchDatastore_Task, 'duration_secs': 0.008554} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.392964] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1477.393266] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 66a86c13-cb71-4cc1-95e2-23ba29133753/66a86c13-cb71-4cc1-95e2-23ba29133753.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1477.393592] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6fe41e0-79e4-4bd1-a6f8-0e59f0496ac6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.401449] env[62522]: DEBUG oslo_vmware.api [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Waiting for the task: (returnval){ [ 1477.401449] env[62522]: value = "task-2416641" [ 1477.401449] env[62522]: _type = "Task" [ 1477.401449] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.408758] env[62522]: DEBUG oslo_vmware.api [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416641, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.739457] env[62522]: DEBUG oslo_concurrency.lockutils [req-b540bd44-8465-4900-ab36-d9ddbcdd762d req-876cd9e8-026b-4bd6-b536-5c19e91833d7 service nova] Releasing lock "refresh_cache-66a86c13-cb71-4cc1-95e2-23ba29133753" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1477.912534] env[62522]: DEBUG oslo_vmware.api [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416641, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48575} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.912901] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 66a86c13-cb71-4cc1-95e2-23ba29133753/66a86c13-cb71-4cc1-95e2-23ba29133753.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1477.913237] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1477.913476] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4cc0d36a-c2ca-4c75-9413-b0e74ed9d464 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.921176] env[62522]: DEBUG oslo_vmware.api [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Waiting for the task: (returnval){ [ 1477.921176] env[62522]: value = "task-2416642" [ 1477.921176] env[62522]: _type = "Task" [ 1477.921176] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.929869] env[62522]: DEBUG oslo_vmware.api [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416642, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.430993] env[62522]: DEBUG oslo_vmware.api [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416642, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069082} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.431277] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1478.432049] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1037c62-6845-4921-8c0a-24664e299797 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.454630] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 66a86c13-cb71-4cc1-95e2-23ba29133753/66a86c13-cb71-4cc1-95e2-23ba29133753.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1478.455045] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e190f10b-c000-4517-bb55-52f6dfb2b594 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.474335] env[62522]: DEBUG oslo_vmware.api [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Waiting for the task: (returnval){ [ 1478.474335] env[62522]: value = "task-2416643" [ 1478.474335] env[62522]: _type = "Task" [ 1478.474335] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.481814] env[62522]: DEBUG oslo_vmware.api [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416643, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.555770] env[62522]: DEBUG nova.compute.manager [req-e3f75194-bcbf-490b-b033-528c1c9cacda req-2d938b56-f1ab-4cbc-9298-c49f8ccae335 service nova] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Received event network-changed-71b25a9a-9768-4740-adf2-4b118bf2e559 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1478.555997] env[62522]: DEBUG nova.compute.manager [req-e3f75194-bcbf-490b-b033-528c1c9cacda req-2d938b56-f1ab-4cbc-9298-c49f8ccae335 service nova] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Refreshing instance network info cache due to event network-changed-71b25a9a-9768-4740-adf2-4b118bf2e559. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1478.556275] env[62522]: DEBUG oslo_concurrency.lockutils [req-e3f75194-bcbf-490b-b033-528c1c9cacda req-2d938b56-f1ab-4cbc-9298-c49f8ccae335 service nova] Acquiring lock "refresh_cache-8c9f01e4-354d-4746-a3ac-f0895ba857ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1478.556441] env[62522]: DEBUG oslo_concurrency.lockutils [req-e3f75194-bcbf-490b-b033-528c1c9cacda req-2d938b56-f1ab-4cbc-9298-c49f8ccae335 service nova] Acquired lock "refresh_cache-8c9f01e4-354d-4746-a3ac-f0895ba857ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1478.556734] env[62522]: DEBUG nova.network.neutron [req-e3f75194-bcbf-490b-b033-528c1c9cacda req-2d938b56-f1ab-4cbc-9298-c49f8ccae335 service nova] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Refreshing network info cache for port 71b25a9a-9768-4740-adf2-4b118bf2e559 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1478.956053] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Didn't find any instances for network info cache update. {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 1478.956493] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1478.956493] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1478.956583] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1478.956695] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1478.956936] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1478.957124] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62522) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1478.983866] env[62522]: DEBUG oslo_vmware.api [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416643, 'name': ReconfigVM_Task, 'duration_secs': 0.269788} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.984147] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 66a86c13-cb71-4cc1-95e2-23ba29133753/66a86c13-cb71-4cc1-95e2-23ba29133753.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1478.984797] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-746ceb5e-bb6b-44e7-ae91-b822be23d0a2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.991179] env[62522]: DEBUG oslo_vmware.api [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Waiting for the task: (returnval){ [ 1478.991179] env[62522]: value = "task-2416644" [ 1478.991179] env[62522]: _type = "Task" [ 1478.991179] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.998306] env[62522]: DEBUG oslo_vmware.api [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416644, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.247482] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1479.247645] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Cleaning up deleted instances with incomplete migration {{(pid=62522) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11783}} [ 1479.274696] env[62522]: DEBUG nova.network.neutron [req-e3f75194-bcbf-490b-b033-528c1c9cacda req-2d938b56-f1ab-4cbc-9298-c49f8ccae335 service nova] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Updated VIF entry in instance network info cache for port 71b25a9a-9768-4740-adf2-4b118bf2e559. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1479.275065] env[62522]: DEBUG nova.network.neutron [req-e3f75194-bcbf-490b-b033-528c1c9cacda req-2d938b56-f1ab-4cbc-9298-c49f8ccae335 service nova] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Updating instance_info_cache with network_info: [{"id": "71b25a9a-9768-4740-adf2-4b118bf2e559", "address": "fa:16:3e:22:69:0a", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71b25a9a-97", "ovs_interfaceid": "71b25a9a-9768-4740-adf2-4b118bf2e559", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.502396] env[62522]: DEBUG oslo_vmware.api [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416644, 'name': Rename_Task, 'duration_secs': 0.135677} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.502678] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1479.502924] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa2e173c-edee-4e0c-800c-ee28a857be9b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.509683] env[62522]: DEBUG oslo_vmware.api [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Waiting for the task: (returnval){ [ 1479.509683] env[62522]: value = "task-2416645" [ 1479.509683] env[62522]: _type = "Task" [ 1479.509683] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.516968] env[62522]: DEBUG oslo_vmware.api [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416645, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.777751] env[62522]: DEBUG oslo_concurrency.lockutils [req-e3f75194-bcbf-490b-b033-528c1c9cacda req-2d938b56-f1ab-4cbc-9298-c49f8ccae335 service nova] Releasing lock "refresh_cache-8c9f01e4-354d-4746-a3ac-f0895ba857ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1480.019702] env[62522]: DEBUG oslo_vmware.api [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416645, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.520999] env[62522]: DEBUG oslo_vmware.api [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416645, 'name': PowerOnVM_Task, 'duration_secs': 0.75999} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.521300] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1480.521419] env[62522]: INFO nova.compute.manager [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Took 6.21 seconds to spawn the instance on the hypervisor. [ 1480.521598] env[62522]: DEBUG nova.compute.manager [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1480.522357] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-433b7589-c3d7-469a-b4dc-6790024e29e3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.038315] env[62522]: INFO nova.compute.manager [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Took 10.90 seconds to build instance. 
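For reference, the network_info structures dumped in the cache-update entries above are plain JSON-serializable lists of VIF dicts, so the fields of interest (port id, MAC address, device name, fixed IPs) can be read out directly. An abbreviated sketch using only fields copied from those entries; everything else is trimmed:

network_info = [{
    "id": "8ff00789-40b8-4d7c-9206-9fa9c9a3661f",
    "address": "fa:16:3e:97:14:de",
    "devname": "tap8ff00789-40",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.7", "type": "fixed"}],
        }],
    },
}]

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], vif["devname"], fixed_ips)
# -> 8ff00789-40b8-4d7c-9206-9fa9c9a3661f fa:16:3e:97:14:de tap8ff00789-40 ['192.168.128.7']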
[ 1481.540909] env[62522]: DEBUG oslo_concurrency.lockutils [None req-8254b991-dd85-40fc-9989-220c7634531e tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Lock "66a86c13-cb71-4cc1-95e2-23ba29133753" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.410s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1482.282515] env[62522]: INFO nova.compute.manager [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Rescuing [ 1482.282857] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Acquiring lock "refresh_cache-66a86c13-cb71-4cc1-95e2-23ba29133753" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1482.282976] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Acquired lock "refresh_cache-66a86c13-cb71-4cc1-95e2-23ba29133753" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1482.283145] env[62522]: DEBUG nova.network.neutron [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1483.007606] env[62522]: DEBUG nova.network.neutron [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Updating instance_info_cache with network_info: [{"id": "8ff00789-40b8-4d7c-9206-9fa9c9a3661f", "address": "fa:16:3e:97:14:de", "network": {"id": "d7eaa938-9f8a-488e-9158-32e272d26c16", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-2361194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "49ab8851e8f94ea4823c977261adf45b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ff00789-40", "ovs_interfaceid": "8ff00789-40b8-4d7c-9206-9fa9c9a3661f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1483.510241] env[62522]: DEBUG 
oslo_concurrency.lockutils [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Releasing lock "refresh_cache-66a86c13-cb71-4cc1-95e2-23ba29133753" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1485.046961] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1485.047438] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2501b888-36dd-4c5a-8bc6-e0a875a4bdbf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.058466] env[62522]: DEBUG oslo_vmware.api [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Waiting for the task: (returnval){ [ 1485.058466] env[62522]: value = "task-2416646" [ 1485.058466] env[62522]: _type = "Task" [ 1485.058466] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.066616] env[62522]: DEBUG oslo_vmware.api [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416646, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.568506] env[62522]: DEBUG oslo_vmware.api [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416646, 'name': PowerOffVM_Task, 'duration_secs': 0.179414} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.568777] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1485.569552] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3242604c-06e8-4d6f-ae77-9af0792e4a32 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.587520] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d06ae3-f30c-45ac-bf69-dc809845d194 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.612630] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1485.612868] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a14302ba-512d-4c58-9a8f-98d313037932 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.618742] env[62522]: DEBUG oslo_vmware.api [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Waiting for the task: (returnval){ [ 1485.618742] env[62522]: value = "task-2416647" [ 1485.618742] env[62522]: _type = "Task" [ 1485.618742] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.625741] env[62522]: DEBUG oslo_vmware.api [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416647, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.129374] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] VM already powered off {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1486.129767] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1486.129828] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1486.129961] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1486.130160] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1486.130404] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4077e975-b73e-4afe-ae76-07174638f26f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.138838] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1486.139008] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1486.139654] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c18150b-df38-4bb7-a39e-ae088493b98b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.144250] env[62522]: DEBUG oslo_vmware.api [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Waiting for the task: (returnval){ [ 1486.144250] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c8e89f-1cfc-2511-59b2-d04b367fa0ba" [ 1486.144250] env[62522]: _type = "Task" [ 1486.144250] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.151331] env[62522]: DEBUG oslo_vmware.api [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c8e89f-1cfc-2511-59b2-d04b367fa0ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.653994] env[62522]: DEBUG oslo_vmware.api [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52c8e89f-1cfc-2511-59b2-d04b367fa0ba, 'name': SearchDatastore_Task, 'duration_secs': 0.008088} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.654738] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8e74622-2985-4aba-83ba-f14dade344db {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.659410] env[62522]: DEBUG oslo_vmware.api [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Waiting for the task: (returnval){ [ 1486.659410] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525704a4-dc20-c637-6bd3-415835fe252d" [ 1486.659410] env[62522]: _type = "Task" [ 1486.659410] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.666582] env[62522]: DEBUG oslo_vmware.api [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525704a4-dc20-c637-6bd3-415835fe252d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.170297] env[62522]: DEBUG oslo_vmware.api [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525704a4-dc20-c637-6bd3-415835fe252d, 'name': SearchDatastore_Task, 'duration_secs': 0.008754} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.170702] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1487.170818] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 66a86c13-cb71-4cc1-95e2-23ba29133753/2ee4561b-ba48-4f45-82f6-eac89be98290-rescue.vmdk. {{(pid=62522) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1487.171072] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4463fc21-72c8-493e-947d-7f1d5077c700 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.177638] env[62522]: DEBUG oslo_vmware.api [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Waiting for the task: (returnval){ [ 1487.177638] env[62522]: value = "task-2416648" [ 1487.177638] env[62522]: _type = "Task" [ 1487.177638] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.184292] env[62522]: DEBUG oslo_vmware.api [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416648, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.688226] env[62522]: DEBUG oslo_vmware.api [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416648, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.433156} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.688434] env[62522]: INFO nova.virt.vmwareapi.ds_util [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore1] 66a86c13-cb71-4cc1-95e2-23ba29133753/2ee4561b-ba48-4f45-82f6-eac89be98290-rescue.vmdk. 
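At this point in the rescue flow the base image vmdk has been located in the devstack image cache via SearchDatastore_Task and copied next to the instance as a "-rescue.vmdk", all while the per-image cache lock logged above is held. The sketch below only illustrates how the copy destination seen in these entries is assembled and how the "acquired :: waited" / "released :: held" timing lines could be produced; timed_lock is a plain-Python stand-in, not oslo.concurrency, and rescue_disk_path is an illustrative helper rather than Nova's own path code.

import time
from contextlib import contextmanager
from threading import Lock

# Plain-Python stand-in for the lock bookkeeping seen in the log; not oslo.concurrency.
_locks = {}

@contextmanager
def timed_lock(name: str):
    lock = _locks.setdefault(name, Lock())
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    print(f'Lock "{name}" acquired :: waited {acquired - start:.3f}s')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" released :: held {time.monotonic() - acquired:.3f}s')

def rescue_disk_path(datastore: str, instance_uuid: str, image_id: str) -> str:
    """Destination path of the rescue disk copy, as seen in the entries above."""
    return f"[{datastore}] {instance_uuid}/{image_id}-rescue.vmdk"

# Reproducing the source and destination paths from the log:
cache_vmdk = ("[datastore1] devstack-image-cache_base/"
              "2ee4561b-ba48-4f45-82f6-eac89be98290/"
              "2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk")
with timed_lock(cache_vmdk):
    dest = rescue_disk_path("datastore1",
                            "66a86c13-cb71-4cc1-95e2-23ba29133753",
                            "2ee4561b-ba48-4f45-82f6-eac89be98290")
    print("copy", cache_vmdk, "->", dest)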
[ 1487.689206] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c079cd5-411e-4aaa-8708-09c0b603375b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.713717] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 66a86c13-cb71-4cc1-95e2-23ba29133753/2ee4561b-ba48-4f45-82f6-eac89be98290-rescue.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1487.713956] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8e1a0c9-a94f-4f3f-a73c-a16b4707d9a0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.732239] env[62522]: DEBUG oslo_vmware.api [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Waiting for the task: (returnval){ [ 1487.732239] env[62522]: value = "task-2416649" [ 1487.732239] env[62522]: _type = "Task" [ 1487.732239] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.739966] env[62522]: DEBUG oslo_vmware.api [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416649, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.242552] env[62522]: DEBUG oslo_vmware.api [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416649, 'name': ReconfigVM_Task, 'duration_secs': 0.288665} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.242552] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 66a86c13-cb71-4cc1-95e2-23ba29133753/2ee4561b-ba48-4f45-82f6-eac89be98290-rescue.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1488.243384] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89bfc4d1-cde9-4655-bf16-a1095f882305 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.268448] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6a8becc-e2f8-4dbf-a1c9-8477df0c0679 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.283433] env[62522]: DEBUG oslo_vmware.api [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Waiting for the task: (returnval){ [ 1488.283433] env[62522]: value = "task-2416650" [ 1488.283433] env[62522]: _type = "Task" [ 1488.283433] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.290641] env[62522]: DEBUG oslo_vmware.api [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416650, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.792727] env[62522]: DEBUG oslo_vmware.api [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416650, 'name': ReconfigVM_Task, 'duration_secs': 0.257604} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.792998] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1488.793258] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fe5dd204-2f5e-46eb-bc0e-0f7d9b8c6750 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.799161] env[62522]: DEBUG oslo_vmware.api [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Waiting for the task: (returnval){ [ 1488.799161] env[62522]: value = "task-2416651" [ 1488.799161] env[62522]: _type = "Task" [ 1488.799161] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.806010] env[62522]: DEBUG oslo_vmware.api [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416651, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.309729] env[62522]: DEBUG oslo_vmware.api [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416651, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.810338] env[62522]: DEBUG oslo_vmware.api [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416651, 'name': PowerOnVM_Task, 'duration_secs': 0.564184} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.810620] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1489.813431] env[62522]: DEBUG nova.compute.manager [None req-3c749722-060d-4dcf-bf9f-d60a11776f82 tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1489.814350] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c07889-68cb-4f80-ba1e-b41f83977314 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.205729] env[62522]: DEBUG nova.compute.manager [req-2a622807-1e2d-4c13-b363-cdcc2fe75527 req-829949e4-8bde-4208-908a-ced9ac8b216c service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Received event network-changed-8ff00789-40b8-4d7c-9206-9fa9c9a3661f {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1491.205985] env[62522]: DEBUG nova.compute.manager [req-2a622807-1e2d-4c13-b363-cdcc2fe75527 req-829949e4-8bde-4208-908a-ced9ac8b216c service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Refreshing instance network info cache due to event network-changed-8ff00789-40b8-4d7c-9206-9fa9c9a3661f. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1491.206188] env[62522]: DEBUG oslo_concurrency.lockutils [req-2a622807-1e2d-4c13-b363-cdcc2fe75527 req-829949e4-8bde-4208-908a-ced9ac8b216c service nova] Acquiring lock "refresh_cache-66a86c13-cb71-4cc1-95e2-23ba29133753" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1491.206351] env[62522]: DEBUG oslo_concurrency.lockutils [req-2a622807-1e2d-4c13-b363-cdcc2fe75527 req-829949e4-8bde-4208-908a-ced9ac8b216c service nova] Acquired lock "refresh_cache-66a86c13-cb71-4cc1-95e2-23ba29133753" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1491.206520] env[62522]: DEBUG nova.network.neutron [req-2a622807-1e2d-4c13-b363-cdcc2fe75527 req-829949e4-8bde-4208-908a-ced9ac8b216c service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Refreshing network info cache for port 8ff00789-40b8-4d7c-9206-9fa9c9a3661f {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1491.989115] env[62522]: DEBUG nova.network.neutron [req-2a622807-1e2d-4c13-b363-cdcc2fe75527 req-829949e4-8bde-4208-908a-ced9ac8b216c service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Updated VIF entry in instance network info cache for port 8ff00789-40b8-4d7c-9206-9fa9c9a3661f. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1491.989547] env[62522]: DEBUG nova.network.neutron [req-2a622807-1e2d-4c13-b363-cdcc2fe75527 req-829949e4-8bde-4208-908a-ced9ac8b216c service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Updating instance_info_cache with network_info: [{"id": "8ff00789-40b8-4d7c-9206-9fa9c9a3661f", "address": "fa:16:3e:97:14:de", "network": {"id": "d7eaa938-9f8a-488e-9158-32e272d26c16", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-2361194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "49ab8851e8f94ea4823c977261adf45b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ff00789-40", "ovs_interfaceid": "8ff00789-40b8-4d7c-9206-9fa9c9a3661f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1492.492343] env[62522]: DEBUG oslo_concurrency.lockutils [req-2a622807-1e2d-4c13-b363-cdcc2fe75527 req-829949e4-8bde-4208-908a-ced9ac8b216c service nova] Releasing lock "refresh_cache-66a86c13-cb71-4cc1-95e2-23ba29133753" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1493.231171] env[62522]: DEBUG nova.compute.manager [req-6a620738-5a14-4909-b84d-3f03aad27de6 req-5820bf4a-f40a-42c1-a74b-8ec1160ae21a service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Received event 
network-changed-8ff00789-40b8-4d7c-9206-9fa9c9a3661f {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1493.231356] env[62522]: DEBUG nova.compute.manager [req-6a620738-5a14-4909-b84d-3f03aad27de6 req-5820bf4a-f40a-42c1-a74b-8ec1160ae21a service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Refreshing instance network info cache due to event network-changed-8ff00789-40b8-4d7c-9206-9fa9c9a3661f. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1493.231548] env[62522]: DEBUG oslo_concurrency.lockutils [req-6a620738-5a14-4909-b84d-3f03aad27de6 req-5820bf4a-f40a-42c1-a74b-8ec1160ae21a service nova] Acquiring lock "refresh_cache-66a86c13-cb71-4cc1-95e2-23ba29133753" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1493.231694] env[62522]: DEBUG oslo_concurrency.lockutils [req-6a620738-5a14-4909-b84d-3f03aad27de6 req-5820bf4a-f40a-42c1-a74b-8ec1160ae21a service nova] Acquired lock "refresh_cache-66a86c13-cb71-4cc1-95e2-23ba29133753" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.231855] env[62522]: DEBUG nova.network.neutron [req-6a620738-5a14-4909-b84d-3f03aad27de6 req-5820bf4a-f40a-42c1-a74b-8ec1160ae21a service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Refreshing network info cache for port 8ff00789-40b8-4d7c-9206-9fa9c9a3661f {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1493.749922] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1493.750266] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Cleaning up deleted instances {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11745}} [ 1494.026052] env[62522]: DEBUG nova.network.neutron [req-6a620738-5a14-4909-b84d-3f03aad27de6 req-5820bf4a-f40a-42c1-a74b-8ec1160ae21a service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Updated VIF entry in instance network info cache for port 8ff00789-40b8-4d7c-9206-9fa9c9a3661f. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1494.026052] env[62522]: DEBUG nova.network.neutron [req-6a620738-5a14-4909-b84d-3f03aad27de6 req-5820bf4a-f40a-42c1-a74b-8ec1160ae21a service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Updating instance_info_cache with network_info: [{"id": "8ff00789-40b8-4d7c-9206-9fa9c9a3661f", "address": "fa:16:3e:97:14:de", "network": {"id": "d7eaa938-9f8a-488e-9158-32e272d26c16", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-2361194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "49ab8851e8f94ea4823c977261adf45b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ff00789-40", "ovs_interfaceid": "8ff00789-40b8-4d7c-9206-9fa9c9a3661f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1494.257247] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] There are 20 instances to clean {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11754}} [ 1494.257440] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: d4f56d59-e03f-4eaf-aa2d-b77241e13be3] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1494.528286] env[62522]: DEBUG oslo_concurrency.lockutils [req-6a620738-5a14-4909-b84d-3f03aad27de6 req-5820bf4a-f40a-42c1-a74b-8ec1160ae21a service nova] Releasing lock "refresh_cache-66a86c13-cb71-4cc1-95e2-23ba29133753" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1494.528464] env[62522]: DEBUG nova.compute.manager [req-6a620738-5a14-4909-b84d-3f03aad27de6 req-5820bf4a-f40a-42c1-a74b-8ec1160ae21a service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Received event network-changed-8ff00789-40b8-4d7c-9206-9fa9c9a3661f {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1494.528661] env[62522]: DEBUG nova.compute.manager [req-6a620738-5a14-4909-b84d-3f03aad27de6 req-5820bf4a-f40a-42c1-a74b-8ec1160ae21a service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Refreshing instance network info cache due to event network-changed-8ff00789-40b8-4d7c-9206-9fa9c9a3661f. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1494.528875] env[62522]: DEBUG oslo_concurrency.lockutils [req-6a620738-5a14-4909-b84d-3f03aad27de6 req-5820bf4a-f40a-42c1-a74b-8ec1160ae21a service nova] Acquiring lock "refresh_cache-66a86c13-cb71-4cc1-95e2-23ba29133753" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.529027] env[62522]: DEBUG oslo_concurrency.lockutils [req-6a620738-5a14-4909-b84d-3f03aad27de6 req-5820bf4a-f40a-42c1-a74b-8ec1160ae21a service nova] Acquired lock "refresh_cache-66a86c13-cb71-4cc1-95e2-23ba29133753" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.529201] env[62522]: DEBUG nova.network.neutron [req-6a620738-5a14-4909-b84d-3f03aad27de6 req-5820bf4a-f40a-42c1-a74b-8ec1160ae21a service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Refreshing network info cache for port 8ff00789-40b8-4d7c-9206-9fa9c9a3661f {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1494.638778] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Acquiring lock "66a86c13-cb71-4cc1-95e2-23ba29133753" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.639054] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Lock "66a86c13-cb71-4cc1-95e2-23ba29133753" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.639274] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Acquiring lock "66a86c13-cb71-4cc1-95e2-23ba29133753-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.639466] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Lock "66a86c13-cb71-4cc1-95e2-23ba29133753-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.639636] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Lock "66a86c13-cb71-4cc1-95e2-23ba29133753-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.641809] env[62522]: INFO nova.compute.manager [None 
req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Terminating instance [ 1494.760881] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: c464ae64-056f-4629-add9-2ff7a1971ebb] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1495.145451] env[62522]: DEBUG nova.compute.manager [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1495.145671] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1495.146597] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1dba789-0953-4963-8f83-27cbe3de7e30 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.155878] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1495.156121] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c719b805-e86e-4b6e-a90a-2b44f2688f28 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.161872] env[62522]: DEBUG oslo_vmware.api [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Waiting for the task: (returnval){ [ 1495.161872] env[62522]: value = "task-2416652" [ 1495.161872] env[62522]: _type = "Task" [ 1495.161872] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.169073] env[62522]: DEBUG oslo_vmware.api [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416652, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.217973] env[62522]: DEBUG nova.network.neutron [req-6a620738-5a14-4909-b84d-3f03aad27de6 req-5820bf4a-f40a-42c1-a74b-8ec1160ae21a service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Updated VIF entry in instance network info cache for port 8ff00789-40b8-4d7c-9206-9fa9c9a3661f. 
{{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1495.218468] env[62522]: DEBUG nova.network.neutron [req-6a620738-5a14-4909-b84d-3f03aad27de6 req-5820bf4a-f40a-42c1-a74b-8ec1160ae21a service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Updating instance_info_cache with network_info: [{"id": "8ff00789-40b8-4d7c-9206-9fa9c9a3661f", "address": "fa:16:3e:97:14:de", "network": {"id": "d7eaa938-9f8a-488e-9158-32e272d26c16", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-2361194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "49ab8851e8f94ea4823c977261adf45b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ff00789-40", "ovs_interfaceid": "8ff00789-40b8-4d7c-9206-9fa9c9a3661f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1495.256710] env[62522]: DEBUG nova.compute.manager [req-85004ff5-ae3a-4fc0-bdf7-ac330ef130b0 req-171e3a0c-fc9c-443a-889d-e9a859a777a4 service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Received event network-changed-8ff00789-40b8-4d7c-9206-9fa9c9a3661f {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1495.256772] env[62522]: DEBUG nova.compute.manager [req-85004ff5-ae3a-4fc0-bdf7-ac330ef130b0 req-171e3a0c-fc9c-443a-889d-e9a859a777a4 service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Refreshing instance network info cache due to event network-changed-8ff00789-40b8-4d7c-9206-9fa9c9a3661f. {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1495.257016] env[62522]: DEBUG oslo_concurrency.lockutils [req-85004ff5-ae3a-4fc0-bdf7-ac330ef130b0 req-171e3a0c-fc9c-443a-889d-e9a859a777a4 service nova] Acquiring lock "refresh_cache-66a86c13-cb71-4cc1-95e2-23ba29133753" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1495.263491] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 75ba1afc-3586-4bb0-ae7f-ebf5a794f068] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1495.671540] env[62522]: DEBUG oslo_vmware.api [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416652, 'name': PowerOffVM_Task, 'duration_secs': 0.193653} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.671858] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1495.672053] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1495.672308] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3ce86d38-38ce-4e7a-a1bc-06f5fda22843 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.720593] env[62522]: DEBUG oslo_concurrency.lockutils [req-6a620738-5a14-4909-b84d-3f03aad27de6 req-5820bf4a-f40a-42c1-a74b-8ec1160ae21a service nova] Releasing lock "refresh_cache-66a86c13-cb71-4cc1-95e2-23ba29133753" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.721010] env[62522]: DEBUG oslo_concurrency.lockutils [req-85004ff5-ae3a-4fc0-bdf7-ac330ef130b0 req-171e3a0c-fc9c-443a-889d-e9a859a777a4 service nova] Acquired lock "refresh_cache-66a86c13-cb71-4cc1-95e2-23ba29133753" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1495.721216] env[62522]: DEBUG nova.network.neutron [req-85004ff5-ae3a-4fc0-bdf7-ac330ef130b0 req-171e3a0c-fc9c-443a-889d-e9a859a777a4 service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Refreshing network info cache for port 8ff00789-40b8-4d7c-9206-9fa9c9a3661f {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1495.739360] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1495.739570] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Deleting contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1495.739749] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Deleting the datastore file [datastore1] 66a86c13-cb71-4cc1-95e2-23ba29133753 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1495.740014] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0e3264c9-cec1-407f-9c6a-58f22a0896b0 {{(pid=62522) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.746860] env[62522]: DEBUG oslo_vmware.api [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Waiting for the task: (returnval){ [ 1495.746860] env[62522]: value = "task-2416654" [ 1495.746860] env[62522]: _type = "Task" [ 1495.746860] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.754425] env[62522]: DEBUG oslo_vmware.api [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416654, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.766200] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: c95f697b-0d68-489d-bfc4-9d129eab1be2] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1496.258384] env[62522]: DEBUG oslo_vmware.api [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Task: {'id': task-2416654, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162131} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.258644] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1496.258824] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Deleted contents of the VM from datastore datastore1 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1496.258999] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1496.259186] env[62522]: INFO nova.compute.manager [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1496.259426] env[62522]: DEBUG oslo.service.loopingcall [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1496.259625] env[62522]: DEBUG nova.compute.manager [-] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1496.259709] env[62522]: DEBUG nova.network.neutron [-] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1496.268777] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: fd9af7c3-358e-417f-97f4-fd2d67d21300] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1496.529259] env[62522]: DEBUG nova.network.neutron [req-85004ff5-ae3a-4fc0-bdf7-ac330ef130b0 req-171e3a0c-fc9c-443a-889d-e9a859a777a4 service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Updated VIF entry in instance network info cache for port 8ff00789-40b8-4d7c-9206-9fa9c9a3661f. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1496.529625] env[62522]: DEBUG nova.network.neutron [req-85004ff5-ae3a-4fc0-bdf7-ac330ef130b0 req-171e3a0c-fc9c-443a-889d-e9a859a777a4 service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Updating instance_info_cache with network_info: [{"id": "8ff00789-40b8-4d7c-9206-9fa9c9a3661f", "address": "fa:16:3e:97:14:de", "network": {"id": "d7eaa938-9f8a-488e-9158-32e272d26c16", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-2361194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "49ab8851e8f94ea4823c977261adf45b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ff00789-40", "ovs_interfaceid": "8ff00789-40b8-4d7c-9206-9fa9c9a3661f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1496.771768] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: ecc70761-8f69-48f6-8e81-7d2ba3728c70] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1497.031802] env[62522]: DEBUG oslo_concurrency.lockutils [req-85004ff5-ae3a-4fc0-bdf7-ac330ef130b0 req-171e3a0c-fc9c-443a-889d-e9a859a777a4 service nova] Releasing lock "refresh_cache-66a86c13-cb71-4cc1-95e2-23ba29133753" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1497.131862] env[62522]: DEBUG nova.network.neutron [-] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1497.274256] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: da11bae6-484b-455e-9462-6f5143d2a9a9] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1497.280461] env[62522]: DEBUG nova.compute.manager [req-cc73dcc1-9cd1-4a9f-940f-461fbfd44457 req-2b9a4aba-80f2-4595-b41d-c892d0c3bded service nova] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Received event network-vif-deleted-8ff00789-40b8-4d7c-9206-9fa9c9a3661f {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1497.634290] env[62522]: INFO nova.compute.manager [-] [instance: 66a86c13-cb71-4cc1-95e2-23ba29133753] Took 1.37 seconds to deallocate network for instance. [ 1497.776944] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 961df2ff-bd02-45af-afb8-14a99cfea1de] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1498.140966] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.141245] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1498.141467] env[62522]: DEBUG nova.objects.instance [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Lazy-loading 'resources' on Instance uuid 66a86c13-cb71-4cc1-95e2-23ba29133753 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1498.279792] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: f74196c1-b00f-4f42-84dc-17b21fa30374] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1498.783607] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 0ba51973-2ffe-460c-a4e2-c9e2a2b768b1] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1498.799688] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77afd052-2a74-4095-ac06-a1119c49f138 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.807718] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-642b6f74-d38b-4ef4-a223-d348ce96c67f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.837672] env[62522]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-106d5858-d3d6-454c-b04f-257fa58b1a9f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.844505] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-855cc680-6311-41f8-9997-43d4e8eef787 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.857308] env[62522]: DEBUG nova.compute.provider_tree [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1499.286929] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 7406a1a4-a342-475b-ad02-6a29f7c487ee] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1499.360216] env[62522]: DEBUG nova.scheduler.client.report [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1499.790832] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: bbb8ba81-9fed-419c-b2f9-ac5baaac3b88] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1499.864694] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.723s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.882000] env[62522]: INFO nova.scheduler.client.report [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Deleted allocations for instance 66a86c13-cb71-4cc1-95e2-23ba29133753 [ 1500.293993] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 27f4b976-7dff-49b0-9b00-7515cb976e72] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1500.389026] env[62522]: DEBUG oslo_concurrency.lockutils [None req-bc64f79f-a215-48aa-9279-988e3dcea83d tempest-ServerRescueTestJSONUnderV235-1749866034 tempest-ServerRescueTestJSONUnderV235-1749866034-project-member] Lock "66a86c13-cb71-4cc1-95e2-23ba29133753" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.750s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.797851] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: a4cb5c19-9087-4354-9689-a99ae8924dc1] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1501.300969] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: cb7a19f1-6093-47ee-bbbc-a75dd5423f32] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1501.804386] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 08d7e318-ea68-4807-a300-ee4a7993647d] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1502.308354] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 1c6451e0-2fae-4d2b-86d7-86f9537a6259] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1502.812812] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: f3894644-eb7e-4a6d-9029-4cd30466d6f8] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1503.316844] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: b31195c2-29f4-475c-baa7-fcb4791b7278] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1503.820065] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 892926ef-3044-497c-8fc8-30cd298e4311] Instance has had 0 of 5 cleanup attempts {{(pid=62522) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1515.088195] env[62522]: DEBUG nova.compute.manager [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Stashing vm_state: active {{(pid=62522) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1515.608491] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1515.608755] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1516.113366] env[62522]: INFO nova.compute.claims [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 
tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1516.619736] env[62522]: INFO nova.compute.resource_tracker [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Updating resource usage from migration ad6c2f50-520b-482a-b99b-8861598e1b02 [ 1516.656754] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-162dd3d8-8187-4cb7-9be1-de6f1963dd7f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.663521] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f6cee6-9793-4f5e-9679-95027e22f616 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.692170] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae3cdd5-76bf-4107-9268-73063dbd57c5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.698779] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47adc6c7-41ba-4861-ab96-1706e3742e4e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.711234] env[62522]: DEBUG nova.compute.provider_tree [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1517.214950] env[62522]: DEBUG nova.scheduler.client.report [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1517.720062] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.111s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.720313] env[62522]: INFO nova.compute.manager [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Migrating [ 1518.235810] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3b435954-e749-41f3-b703-126388aab42f 
tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "refresh_cache-8c9f01e4-354d-4746-a3ac-f0895ba857ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1518.236098] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired lock "refresh_cache-8c9f01e4-354d-4746-a3ac-f0895ba857ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.236205] env[62522]: DEBUG nova.network.neutron [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1518.944738] env[62522]: DEBUG nova.network.neutron [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Updating instance_info_cache with network_info: [{"id": "71b25a9a-9768-4740-adf2-4b118bf2e559", "address": "fa:16:3e:22:69:0a", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71b25a9a-97", "ovs_interfaceid": "71b25a9a-9768-4740-adf2-4b118bf2e559", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1519.447024] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Releasing lock "refresh_cache-8c9f01e4-354d-4746-a3ac-f0895ba857ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1520.961797] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35f5b5bb-7ce7-4139-9622-76b1b21379e3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.981095] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 
8c9f01e4-354d-4746-a3ac-f0895ba857ca] Updating instance '8c9f01e4-354d-4746-a3ac-f0895ba857ca' progress to 0 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1521.487808] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1521.488156] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de68b8aa-470b-4111-b06c-cf30698efa28 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.497057] env[62522]: DEBUG oslo_vmware.api [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1521.497057] env[62522]: value = "task-2416655" [ 1521.497057] env[62522]: _type = "Task" [ 1521.497057] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.506491] env[62522]: DEBUG oslo_vmware.api [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416655, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.006687] env[62522]: DEBUG oslo_vmware.api [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416655, 'name': PowerOffVM_Task, 'duration_secs': 0.197374} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.007072] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1522.007133] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Updating instance '8c9f01e4-354d-4746-a3ac-f0895ba857ca' progress to 17 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1522.514154] env[62522]: DEBUG nova.virt.hardware [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:04Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1522.514406] env[62522]: DEBUG nova.virt.hardware [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1522.514598] env[62522]: DEBUG nova.virt.hardware [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1522.514800] env[62522]: DEBUG nova.virt.hardware [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1522.514948] env[62522]: DEBUG nova.virt.hardware [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1522.515110] env[62522]: DEBUG nova.virt.hardware [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1522.515319] env[62522]: DEBUG nova.virt.hardware [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 
tempest-ServerActionsTestJSON-1767099261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1522.515476] env[62522]: DEBUG nova.virt.hardware [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1522.515640] env[62522]: DEBUG nova.virt.hardware [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1522.515798] env[62522]: DEBUG nova.virt.hardware [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1522.515968] env[62522]: DEBUG nova.virt.hardware [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1522.520927] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-38f174a6-62c6-4302-85c4-792e6869b3f0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.536756] env[62522]: DEBUG oslo_vmware.api [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1522.536756] env[62522]: value = "task-2416656" [ 1522.536756] env[62522]: _type = "Task" [ 1522.536756] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.544625] env[62522]: DEBUG oslo_vmware.api [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416656, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.046389] env[62522]: DEBUG oslo_vmware.api [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416656, 'name': ReconfigVM_Task, 'duration_secs': 0.182232} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.046806] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Updating instance '8c9f01e4-354d-4746-a3ac-f0895ba857ca' progress to 33 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1523.554015] env[62522]: DEBUG nova.virt.hardware [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1523.554015] env[62522]: DEBUG nova.virt.hardware [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1523.554353] env[62522]: DEBUG nova.virt.hardware [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1523.554353] env[62522]: DEBUG nova.virt.hardware [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1523.554439] env[62522]: DEBUG nova.virt.hardware [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1523.554587] env[62522]: DEBUG nova.virt.hardware [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1523.554848] env[62522]: DEBUG nova.virt.hardware [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1523.555020] env[62522]: DEBUG nova.virt.hardware [None req-3b435954-e749-41f3-b703-126388aab42f 
tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1523.555190] env[62522]: DEBUG nova.virt.hardware [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1523.555355] env[62522]: DEBUG nova.virt.hardware [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1523.555527] env[62522]: DEBUG nova.virt.hardware [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1523.560859] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Reconfiguring VM instance instance-00000078 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1523.561164] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-64612cc2-8412-462b-9808-b63aa7847d29 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.580961] env[62522]: DEBUG oslo_vmware.api [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1523.580961] env[62522]: value = "task-2416657" [ 1523.580961] env[62522]: _type = "Task" [ 1523.580961] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.588672] env[62522]: DEBUG oslo_vmware.api [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416657, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.090719] env[62522]: DEBUG oslo_vmware.api [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416657, 'name': ReconfigVM_Task, 'duration_secs': 0.155314} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.091087] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Reconfigured VM instance instance-00000078 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1524.091764] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3487775-4542-45b3-a29e-f126bf1051cc {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.112821] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] 8c9f01e4-354d-4746-a3ac-f0895ba857ca/8c9f01e4-354d-4746-a3ac-f0895ba857ca.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1524.113384] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-448e4a81-28d3-4155-be86-a9fc4491dab0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.131389] env[62522]: DEBUG oslo_vmware.api [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1524.131389] env[62522]: value = "task-2416658" [ 1524.131389] env[62522]: _type = "Task" [ 1524.131389] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.141609] env[62522]: DEBUG oslo_vmware.api [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416658, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.641475] env[62522]: DEBUG oslo_vmware.api [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416658, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.141888] env[62522]: DEBUG oslo_vmware.api [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416658, 'name': ReconfigVM_Task, 'duration_secs': 0.802228} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.142175] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Reconfigured VM instance instance-00000078 to attach disk [datastore2] 8c9f01e4-354d-4746-a3ac-f0895ba857ca/8c9f01e4-354d-4746-a3ac-f0895ba857ca.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1525.142443] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Updating instance '8c9f01e4-354d-4746-a3ac-f0895ba857ca' progress to 50 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1525.648730] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62f7c5e8-5e97-4b34-b3bd-446b4ff84796 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.667305] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae70b0c4-86c4-48f4-b3f6-537ed8ca4c3b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.685013] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Updating instance '8c9f01e4-354d-4746-a3ac-f0895ba857ca' progress to 67 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1526.273562] env[62522]: DEBUG nova.network.neutron [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Port 71b25a9a-9768-4740-adf2-4b118bf2e559 binding to destination host cpu-1 is already ACTIVE {{(pid=62522) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1527.295343] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "8c9f01e4-354d-4746-a3ac-f0895ba857ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1527.295686] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "8c9f01e4-354d-4746-a3ac-f0895ba857ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1527.295686] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 
tempest-ServerActionsTestJSON-1767099261-project-member] Lock "8c9f01e4-354d-4746-a3ac-f0895ba857ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1528.329031] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "refresh_cache-8c9f01e4-354d-4746-a3ac-f0895ba857ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1528.329277] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired lock "refresh_cache-8c9f01e4-354d-4746-a3ac-f0895ba857ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1528.329427] env[62522]: DEBUG nova.network.neutron [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1529.047314] env[62522]: DEBUG nova.network.neutron [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Updating instance_info_cache with network_info: [{"id": "71b25a9a-9768-4740-adf2-4b118bf2e559", "address": "fa:16:3e:22:69:0a", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71b25a9a-97", "ovs_interfaceid": "71b25a9a-9768-4740-adf2-4b118bf2e559", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1529.550733] env[62522]: DEBUG oslo_concurrency.lockutils [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Releasing lock "refresh_cache-8c9f01e4-354d-4746-a3ac-f0895ba857ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.075353] env[62522]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c15e718-ca34-4c0b-bd78-58420a889c78 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.095319] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78f740f0-4abf-4789-9155-e11be8ab1432 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.102081] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Updating instance '8c9f01e4-354d-4746-a3ac-f0895ba857ca' progress to 83 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1530.607888] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1530.608278] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-84709766-7ed7-41b9-83b3-31ab73159ded {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.615755] env[62522]: DEBUG oslo_vmware.api [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1530.615755] env[62522]: value = "task-2416659" [ 1530.615755] env[62522]: _type = "Task" [ 1530.615755] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.623329] env[62522]: DEBUG oslo_vmware.api [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416659, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.128741] env[62522]: DEBUG oslo_vmware.api [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416659, 'name': PowerOnVM_Task, 'duration_secs': 0.367582} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.129098] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1531.129362] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-3b435954-e749-41f3-b703-126388aab42f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Updating instance '8c9f01e4-354d-4746-a3ac-f0895ba857ca' progress to 100 {{(pid=62522) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1533.523957] env[62522]: DEBUG nova.network.neutron [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Port 71b25a9a-9768-4740-adf2-4b118bf2e559 binding to destination host cpu-1 is already ACTIVE {{(pid=62522) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1533.524262] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "refresh_cache-8c9f01e4-354d-4746-a3ac-f0895ba857ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1533.524396] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired lock "refresh_cache-8c9f01e4-354d-4746-a3ac-f0895ba857ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1533.524590] env[62522]: DEBUG nova.network.neutron [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1534.275707] env[62522]: DEBUG nova.network.neutron [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Updating instance_info_cache with network_info: [{"id": "71b25a9a-9768-4740-adf2-4b118bf2e559", "address": "fa:16:3e:22:69:0a", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71b25a9a-97", "ovs_interfaceid": "71b25a9a-9768-4740-adf2-4b118bf2e559", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1534.778075] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Releasing lock "refresh_cache-8c9f01e4-354d-4746-a3ac-f0895ba857ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1535.282110] env[62522]: DEBUG nova.compute.manager [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62522) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1535.282370] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.282612] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1535.786745] env[62522]: DEBUG nova.objects.instance [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lazy-loading 'migration_context' on Instance uuid 8c9f01e4-354d-4746-a3ac-f0895ba857ca {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1536.332785] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad98f78b-788f-4295-b9ba-0fa627a6b4ce {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.340335] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e35149-c296-4b79-896f-eada846c9e68 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.369539] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d53920-aeee-4a98-867e-8cb6267a3bf5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.376368] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c3386d3-df01-4183-9dbc-6f4fe4710d71 
{{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.389876] env[62522]: DEBUG nova.compute.provider_tree [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1536.892716] env[62522]: DEBUG nova.scheduler.client.report [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1537.905439] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.623s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.438547] env[62522]: INFO nova.compute.manager [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Swapping old allocation on dict_keys(['c7fa38b2-245d-4337-a012-22c1a01c0a72']) held by migration ad6c2f50-520b-482a-b99b-8861598e1b02 for instance [ 1539.459855] env[62522]: DEBUG nova.scheduler.client.report [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Overwriting current allocation {'allocations': {'c7fa38b2-245d-4337-a012-22c1a01c0a72': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 181}}, 'project_id': 'bab9d5d3c27d4c218b88e4a029300a66', 'user_id': '607183068c444260afbec94a63fde1d4', 'consumer_generation': 1} on consumer 8c9f01e4-354d-4746-a3ac-f0895ba857ca {{(pid=62522) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1539.542629] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "refresh_cache-8c9f01e4-354d-4746-a3ac-f0895ba857ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1539.542820] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired lock "refresh_cache-8c9f01e4-354d-4746-a3ac-f0895ba857ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1539.542996] env[62522]: DEBUG nova.network.neutron [None 
req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1540.454159] env[62522]: DEBUG nova.network.neutron [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Updating instance_info_cache with network_info: [{"id": "71b25a9a-9768-4740-adf2-4b118bf2e559", "address": "fa:16:3e:22:69:0a", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71b25a9a-97", "ovs_interfaceid": "71b25a9a-9768-4740-adf2-4b118bf2e559", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1540.957180] env[62522]: DEBUG oslo_concurrency.lockutils [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Releasing lock "refresh_cache-8c9f01e4-354d-4746-a3ac-f0895ba857ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1540.957628] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1540.957946] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-12bd9556-4cfb-4a37-9833-91b90a5638fb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.965421] env[62522]: DEBUG oslo_vmware.api [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1540.965421] env[62522]: value = "task-2416660" [ 1540.965421] env[62522]: _type = "Task" [ 1540.965421] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.973831] env[62522]: DEBUG oslo_vmware.api [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416660, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.475467] env[62522]: DEBUG oslo_vmware.api [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416660, 'name': PowerOffVM_Task, 'duration_secs': 0.157444} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.475868] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1541.476404] env[62522]: DEBUG nova.virt.hardware [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1541.476621] env[62522]: DEBUG nova.virt.hardware [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1541.476807] env[62522]: DEBUG nova.virt.hardware [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1541.477017] env[62522]: DEBUG nova.virt.hardware [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1541.477167] env[62522]: DEBUG nova.virt.hardware [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1541.477313] env[62522]: DEBUG nova.virt.hardware [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1541.477515] env[62522]: DEBUG nova.virt.hardware [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1541.477689] env[62522]: DEBUG nova.virt.hardware [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1541.477913] env[62522]: DEBUG nova.virt.hardware [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1541.478102] env[62522]: DEBUG nova.virt.hardware [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1541.478280] env[62522]: DEBUG nova.virt.hardware [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1541.483116] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3e15603-f1bd-4ec6-a349-796627028ba6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.498829] env[62522]: DEBUG oslo_vmware.api [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1541.498829] env[62522]: value = "task-2416661" [ 1541.498829] env[62522]: _type = "Task" [ 1541.498829] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.506054] env[62522]: DEBUG oslo_vmware.api [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416661, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.010195] env[62522]: DEBUG oslo_vmware.api [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416661, 'name': ReconfigVM_Task, 'duration_secs': 0.133799} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.010953] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-948874e8-04b9-4ec7-8b82-d8cba9765389 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.030729] env[62522]: DEBUG nova.virt.hardware [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1542.030984] env[62522]: DEBUG nova.virt.hardware [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1542.031113] env[62522]: DEBUG nova.virt.hardware [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1542.031293] env[62522]: DEBUG nova.virt.hardware [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1542.031435] env[62522]: DEBUG nova.virt.hardware [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1542.031584] env[62522]: DEBUG nova.virt.hardware [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1542.031776] env[62522]: DEBUG nova.virt.hardware [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1542.031934] env[62522]: DEBUG nova.virt.hardware [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1542.032116] env[62522]: DEBUG nova.virt.hardware [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1542.032272] env[62522]: DEBUG nova.virt.hardware [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1542.032441] env[62522]: DEBUG nova.virt.hardware [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1542.033181] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1612de8e-c9c1-4c76-80c6-3ff96fc9ad78 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.038070] env[62522]: DEBUG oslo_vmware.api [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1542.038070] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525c8bbb-b2dd-5a57-eeec-922dc4bfdc91" [ 1542.038070] env[62522]: _type = "Task" [ 1542.038070] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.045455] env[62522]: DEBUG oslo_vmware.api [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525c8bbb-b2dd-5a57-eeec-922dc4bfdc91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.548085] env[62522]: DEBUG oslo_vmware.api [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]525c8bbb-b2dd-5a57-eeec-922dc4bfdc91, 'name': SearchDatastore_Task, 'duration_secs': 0.009602} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.553204] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Reconfiguring VM instance instance-00000078 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1542.553456] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db68cf87-cd46-4ed3-ae14-443920a5791f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.571193] env[62522]: DEBUG oslo_vmware.api [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1542.571193] env[62522]: value = "task-2416662" [ 1542.571193] env[62522]: _type = "Task" [ 1542.571193] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.578496] env[62522]: DEBUG oslo_vmware.api [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416662, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.820578] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1542.820907] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1543.081016] env[62522]: DEBUG oslo_vmware.api [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416662, 'name': ReconfigVM_Task, 'duration_secs': 0.195188} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.081261] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Reconfigured VM instance instance-00000078 to detach disk 2000 {{(pid=62522) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1543.082054] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cfda6c7-9b4a-4bdf-afdc-ddf70e49c914 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.103240] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] 8c9f01e4-354d-4746-a3ac-f0895ba857ca/8c9f01e4-354d-4746-a3ac-f0895ba857ca.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1543.103472] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-752548df-dfb6-42e9-b5b9-7d47edc1ca4e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.120592] env[62522]: DEBUG oslo_vmware.api [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1543.120592] env[62522]: value = "task-2416663" [ 1543.120592] env[62522]: _type = "Task" [ 1543.120592] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.127986] env[62522]: DEBUG oslo_vmware.api [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416663, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.325726] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1543.325932] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Starting heal instance info cache {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1543.325985] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Rebuilding the list of instances to heal {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 1543.629872] env[62522]: DEBUG oslo_vmware.api [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416663, 'name': ReconfigVM_Task, 'duration_secs': 0.306346} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.630279] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Reconfigured VM instance instance-00000078 to attach disk [datastore2] 8c9f01e4-354d-4746-a3ac-f0895ba857ca/8c9f01e4-354d-4746-a3ac-f0895ba857ca.vmdk or device None with type thin {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1543.630936] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d076e2-558a-4ff0-8b4d-be6af2aa97ed {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.647939] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7965823f-6ab9-4f98-920b-f8e93acf7110 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.665267] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11d2b5c-80a5-4408-be94-e14e9d31079a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.681614] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a318c93-d705-411d-b47d-d3856412b321 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.687611] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1543.687824] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2569d2b2-4bd2-4880-abd0-65d9e3c2f8d6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.693589] env[62522]: DEBUG oslo_vmware.api [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1543.693589] env[62522]: value = "task-2416664" [ 1543.693589] env[62522]: _type = "Task" [ 1543.693589] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.700227] env[62522]: DEBUG oslo_vmware.api [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416664, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.829547] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "refresh_cache-8c9f01e4-354d-4746-a3ac-f0895ba857ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1543.829764] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquired lock "refresh_cache-8c9f01e4-354d-4746-a3ac-f0895ba857ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1543.829820] env[62522]: DEBUG nova.network.neutron [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Forcefully refreshing network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1543.829974] env[62522]: DEBUG nova.objects.instance [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lazy-loading 'info_cache' on Instance uuid 8c9f01e4-354d-4746-a3ac-f0895ba857ca {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1544.203321] env[62522]: DEBUG oslo_vmware.api [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416664, 'name': PowerOnVM_Task, 'duration_secs': 0.377901} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.203595] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1545.216016] env[62522]: INFO nova.compute.manager [None req-ae02bc47-aa7d-4181-8239-e4c1be0a7253 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Updating instance to original state: 'active' [ 1545.536239] env[62522]: DEBUG nova.network.neutron [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Updating instance_info_cache with network_info: [{"id": "71b25a9a-9768-4740-adf2-4b118bf2e559", "address": "fa:16:3e:22:69:0a", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71b25a9a-97", 
"ovs_interfaceid": "71b25a9a-9768-4740-adf2-4b118bf2e559", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1546.038655] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Releasing lock "refresh_cache-8c9f01e4-354d-4746-a3ac-f0895ba857ca" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.038915] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Updated the network info_cache for instance {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 1546.039089] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1546.039256] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1546.039406] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1546.039555] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1546.039698] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1546.039843] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1546.039970] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62522) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1546.040127] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1546.544061] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1546.544061] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1546.544061] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1546.544061] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62522) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1546.544628] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b46cda-99d1-453d-9b59-0893f9c0eb07 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.555304] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-240a01e3-fa3b-4187-af5f-52599f111c85 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.573056] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba743d25-7ec2-427d-a17e-971d350c6644 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.580507] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e871997e-5f15-42fd-96b8-557618a84b38 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.610034] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180973MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=62522) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1546.610214] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1546.610431] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1547.062898] env[62522]: DEBUG oslo_concurrency.lockutils [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "8c9f01e4-354d-4746-a3ac-f0895ba857ca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1547.063184] env[62522]: DEBUG oslo_concurrency.lockutils [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "8c9f01e4-354d-4746-a3ac-f0895ba857ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1547.063401] env[62522]: DEBUG oslo_concurrency.lockutils [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "8c9f01e4-354d-4746-a3ac-f0895ba857ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1547.063590] env[62522]: DEBUG oslo_concurrency.lockutils [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "8c9f01e4-354d-4746-a3ac-f0895ba857ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1547.063764] env[62522]: DEBUG oslo_concurrency.lockutils [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "8c9f01e4-354d-4746-a3ac-f0895ba857ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1547.065897] env[62522]: INFO nova.compute.manager [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Terminating instance [ 1547.570288] env[62522]: DEBUG nova.compute.manager [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Start destroying the instance on the hypervisor. 
{{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1547.570708] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1547.571414] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66ca8e18-04b5-4314-a171-c419a6acde73 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.579307] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1547.579536] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7f95f989-257a-4b2f-8196-d308b83407c1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.585666] env[62522]: DEBUG oslo_vmware.api [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1547.585666] env[62522]: value = "task-2416665" [ 1547.585666] env[62522]: _type = "Task" [ 1547.585666] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.594214] env[62522]: DEBUG oslo_vmware.api [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416665, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.633486] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 8c9f01e4-354d-4746-a3ac-f0895ba857ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1547.633693] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1547.633835] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1547.660824] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17ba1f8-8908-418c-8dd5-b0e35169ab79 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.668590] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af6eca45-c7d3-4ef0-8b12-e2515a600cf5 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.699043] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e2c4848-d046-4a28-9a36-0205d4625bf6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.707522] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21cb1ba2-2410-461a-9cf1-e4599fd2a845 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.721609] env[62522]: DEBUG nova.compute.provider_tree [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1548.095518] env[62522]: DEBUG oslo_vmware.api [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416665, 'name': PowerOffVM_Task, 'duration_secs': 0.175543} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.095786] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1548.095954] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1548.096218] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bf497a65-7774-421e-8896-a8aecbe83a2e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.163074] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1548.163404] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1548.163678] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Deleting the datastore file [datastore2] 8c9f01e4-354d-4746-a3ac-f0895ba857ca {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1548.164028] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7c1f880c-fb6b-4023-9c39-a159da1466bd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.170292] env[62522]: DEBUG oslo_vmware.api [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1548.170292] env[62522]: value = "task-2416667" [ 1548.170292] env[62522]: _type = "Task" [ 1548.170292] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.177625] env[62522]: DEBUG oslo_vmware.api [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416667, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.224599] env[62522]: DEBUG nova.scheduler.client.report [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1548.678991] env[62522]: DEBUG oslo_vmware.api [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416667, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153681} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.679398] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1548.679530] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1548.679652] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1548.679826] env[62522]: INFO nova.compute.manager [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1548.680083] env[62522]: DEBUG oslo.service.loopingcall [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1548.680281] env[62522]: DEBUG nova.compute.manager [-] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1548.680377] env[62522]: DEBUG nova.network.neutron [-] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1548.729013] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62522) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1548.729238] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.119s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.205691] env[62522]: DEBUG nova.compute.manager [req-ab99fe36-8468-41d3-8e94-17d542b4888d req-b46f0081-0d7a-4f54-9259-6794b9bd0e44 service nova] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Received event network-vif-deleted-71b25a9a-9768-4740-adf2-4b118bf2e559 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1549.206092] env[62522]: INFO nova.compute.manager [req-ab99fe36-8468-41d3-8e94-17d542b4888d req-b46f0081-0d7a-4f54-9259-6794b9bd0e44 service nova] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Neutron deleted interface 71b25a9a-9768-4740-adf2-4b118bf2e559; detaching it from the instance and deleting it from the info cache [ 1549.206092] env[62522]: DEBUG nova.network.neutron [req-ab99fe36-8468-41d3-8e94-17d542b4888d req-b46f0081-0d7a-4f54-9259-6794b9bd0e44 service nova] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1549.624218] env[62522]: DEBUG nova.network.neutron [-] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1549.708246] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a2634205-d3e8-4526-aa2c-799167450258 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.719219] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab34215-a0df-4ece-9105-7c5f6f3e6cfd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.742896] env[62522]: DEBUG nova.compute.manager [req-ab99fe36-8468-41d3-8e94-17d542b4888d req-b46f0081-0d7a-4f54-9259-6794b9bd0e44 service nova] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Detach interface failed, port_id=71b25a9a-9768-4740-adf2-4b118bf2e559, reason: Instance 8c9f01e4-354d-4746-a3ac-f0895ba857ca could not be found. 
{{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1550.127240] env[62522]: INFO nova.compute.manager [-] [instance: 8c9f01e4-354d-4746-a3ac-f0895ba857ca] Took 1.45 seconds to deallocate network for instance. [ 1550.633992] env[62522]: DEBUG oslo_concurrency.lockutils [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1550.634282] env[62522]: DEBUG oslo_concurrency.lockutils [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1550.634500] env[62522]: DEBUG nova.objects.instance [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lazy-loading 'resources' on Instance uuid 8c9f01e4-354d-4746-a3ac-f0895ba857ca {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1551.168901] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd9c8dd-37fc-4cb5-acb8-b218e11126cf {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.176444] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-599b70b6-fd1e-4f55-b148-ec1b4c54ccf7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.205130] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ec0c50-a960-4f08-b76c-a4bcf6b5a23e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.211954] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55bf5108-9ce5-461f-83e5-cee0405998bb {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.225724] env[62522]: DEBUG nova.compute.provider_tree [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1551.728810] env[62522]: DEBUG nova.scheduler.client.report [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1552.234224] env[62522]: DEBUG oslo_concurrency.lockutils [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.600s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1552.251059] env[62522]: INFO nova.scheduler.client.report [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Deleted allocations for instance 8c9f01e4-354d-4746-a3ac-f0895ba857ca [ 1552.759292] env[62522]: DEBUG oslo_concurrency.lockutils [None req-361e8cd7-1587-4e79-ab6b-15dc9342b531 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "8c9f01e4-354d-4746-a3ac-f0895ba857ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.696s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1554.100795] env[62522]: DEBUG oslo_concurrency.lockutils [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "31020a94-94fd-4cd3-971b-ac78dcad7417" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.101133] env[62522]: DEBUG oslo_concurrency.lockutils [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "31020a94-94fd-4cd3-971b-ac78dcad7417" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1554.603319] env[62522]: DEBUG nova.compute.manager [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Starting instance... 
{{(pid=62522) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1555.123282] env[62522]: DEBUG oslo_concurrency.lockutils [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1555.123539] env[62522]: DEBUG oslo_concurrency.lockutils [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.125065] env[62522]: INFO nova.compute.claims [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1556.161504] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3bb2b1c-5abc-45d7-b988-a990252dad07 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.168996] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91e02ce-643d-46ab-b8d8-c2fa3a31f6e4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.198794] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c855b83-8e95-46c5-a9e6-97905946c50a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.205496] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad14e5f-6478-441d-a391-b56541fb9334 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.217915] env[62522]: DEBUG nova.compute.provider_tree [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1556.720995] env[62522]: DEBUG nova.scheduler.client.report [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1557.225851] env[62522]: DEBUG oslo_concurrency.lockutils [None 
req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.102s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1557.226455] env[62522]: DEBUG nova.compute.manager [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Start building networks asynchronously for instance. {{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1557.731216] env[62522]: DEBUG nova.compute.utils [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Using /dev/sd instead of None {{(pid=62522) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1557.732653] env[62522]: DEBUG nova.compute.manager [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Allocating IP information in the background. {{(pid=62522) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1557.732831] env[62522]: DEBUG nova.network.neutron [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] allocate_for_instance() {{(pid=62522) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1557.787280] env[62522]: DEBUG nova.policy [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '607183068c444260afbec94a63fde1d4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bab9d5d3c27d4c218b88e4a029300a66', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62522) authorize /opt/stack/nova/nova/policy.py:192}} [ 1558.047504] env[62522]: DEBUG nova.network.neutron [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Successfully created port: 5a153fcd-0cb1-47f2-ac65-0ff69f4743e2 {{(pid=62522) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1558.236050] env[62522]: DEBUG nova.compute.manager [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Start building block device mappings for instance. 
{{(pid=62522) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1559.246288] env[62522]: DEBUG nova.compute.manager [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Start spawning the instance on the hypervisor. {{(pid=62522) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1559.273577] env[62522]: DEBUG nova.virt.hardware [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-10T12:17:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-10T12:16:44Z,direct_url=,disk_format='vmdk',id=2ee4561b-ba48-4f45-82f6-eac89be98290,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c1482f918afe4561b7accd9759bb88ad',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-10T12:16:44Z,virtual_size=,visibility=), allow threads: False {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1559.273817] env[62522]: DEBUG nova.virt.hardware [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Flavor limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1559.273977] env[62522]: DEBUG nova.virt.hardware [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Image limits 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1559.274178] env[62522]: DEBUG nova.virt.hardware [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Flavor pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1559.274328] env[62522]: DEBUG nova.virt.hardware [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Image pref 0:0:0 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1559.274475] env[62522]: DEBUG nova.virt.hardware [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62522) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1559.274788] env[62522]: DEBUG nova.virt.hardware [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
1559.274961] env[62522]: DEBUG nova.virt.hardware [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1559.275148] env[62522]: DEBUG nova.virt.hardware [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Got 1 possible topologies {{(pid=62522) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1559.275314] env[62522]: DEBUG nova.virt.hardware [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1559.275485] env[62522]: DEBUG nova.virt.hardware [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62522) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1559.276353] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18ac123d-7df0-404e-b21e-1a367b76f22f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.284096] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad84982-1b86-4077-8b07-0491e5b271e6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.407479] env[62522]: DEBUG nova.compute.manager [req-52e509a4-9372-4c84-9b0a-b5676f1ad7fb req-0ab53abc-f9d6-46ca-95dc-aca6ae282e3b service nova] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Received event network-vif-plugged-5a153fcd-0cb1-47f2-ac65-0ff69f4743e2 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1559.407698] env[62522]: DEBUG oslo_concurrency.lockutils [req-52e509a4-9372-4c84-9b0a-b5676f1ad7fb req-0ab53abc-f9d6-46ca-95dc-aca6ae282e3b service nova] Acquiring lock "31020a94-94fd-4cd3-971b-ac78dcad7417-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.407937] env[62522]: DEBUG oslo_concurrency.lockutils [req-52e509a4-9372-4c84-9b0a-b5676f1ad7fb req-0ab53abc-f9d6-46ca-95dc-aca6ae282e3b service nova] Lock "31020a94-94fd-4cd3-971b-ac78dcad7417-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1559.408128] env[62522]: DEBUG oslo_concurrency.lockutils [req-52e509a4-9372-4c84-9b0a-b5676f1ad7fb req-0ab53abc-f9d6-46ca-95dc-aca6ae282e3b service nova] Lock "31020a94-94fd-4cd3-971b-ac78dcad7417-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1559.408299] env[62522]: DEBUG nova.compute.manager 
[req-52e509a4-9372-4c84-9b0a-b5676f1ad7fb req-0ab53abc-f9d6-46ca-95dc-aca6ae282e3b service nova] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] No waiting events found dispatching network-vif-plugged-5a153fcd-0cb1-47f2-ac65-0ff69f4743e2 {{(pid=62522) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1559.408429] env[62522]: WARNING nova.compute.manager [req-52e509a4-9372-4c84-9b0a-b5676f1ad7fb req-0ab53abc-f9d6-46ca-95dc-aca6ae282e3b service nova] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Received unexpected event network-vif-plugged-5a153fcd-0cb1-47f2-ac65-0ff69f4743e2 for instance with vm_state building and task_state spawning. [ 1559.493373] env[62522]: DEBUG nova.network.neutron [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Successfully updated port: 5a153fcd-0cb1-47f2-ac65-0ff69f4743e2 {{(pid=62522) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1559.995679] env[62522]: DEBUG oslo_concurrency.lockutils [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "refresh_cache-31020a94-94fd-4cd3-971b-ac78dcad7417" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1559.995679] env[62522]: DEBUG oslo_concurrency.lockutils [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired lock "refresh_cache-31020a94-94fd-4cd3-971b-ac78dcad7417" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1559.995779] env[62522]: DEBUG nova.network.neutron [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1560.526584] env[62522]: DEBUG nova.network.neutron [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Instance cache missing network info. 
{{(pid=62522) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1560.646111] env[62522]: DEBUG nova.network.neutron [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Updating instance_info_cache with network_info: [{"id": "5a153fcd-0cb1-47f2-ac65-0ff69f4743e2", "address": "fa:16:3e:bd:ec:5b", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a153fcd-0c", "ovs_interfaceid": "5a153fcd-0cb1-47f2-ac65-0ff69f4743e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1561.148703] env[62522]: DEBUG oslo_concurrency.lockutils [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Releasing lock "refresh_cache-31020a94-94fd-4cd3-971b-ac78dcad7417" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1561.149054] env[62522]: DEBUG nova.compute.manager [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Instance network_info: |[{"id": "5a153fcd-0cb1-47f2-ac65-0ff69f4743e2", "address": "fa:16:3e:bd:ec:5b", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a153fcd-0c", "ovs_interfaceid": "5a153fcd-0cb1-47f2-ac65-0ff69f4743e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62522) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1561.149511] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:ec:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f625f389-b7cf-49b9-998a-87f3a9e3f234', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5a153fcd-0cb1-47f2-ac65-0ff69f4743e2', 'vif_model': 'vmxnet3'}] {{(pid=62522) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1561.157029] env[62522]: DEBUG oslo.service.loopingcall [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1561.157234] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Creating VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1561.157457] env[62522]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-12b1a841-c4da-44f8-ba9c-f09cd74e5b12 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.177218] env[62522]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1561.177218] env[62522]: value = "task-2416668" [ 1561.177218] env[62522]: _type = "Task" [ 1561.177218] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.184661] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416668, 'name': CreateVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.432908] env[62522]: DEBUG nova.compute.manager [req-01696e4c-1661-42f5-8e5b-878f92014255 req-8728fe1c-56e2-428d-9861-b3663a7e226d service nova] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Received event network-changed-5a153fcd-0cb1-47f2-ac65-0ff69f4743e2 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1561.433174] env[62522]: DEBUG nova.compute.manager [req-01696e4c-1661-42f5-8e5b-878f92014255 req-8728fe1c-56e2-428d-9861-b3663a7e226d service nova] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Refreshing instance network info cache due to event network-changed-5a153fcd-0cb1-47f2-ac65-0ff69f4743e2. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1561.433370] env[62522]: DEBUG oslo_concurrency.lockutils [req-01696e4c-1661-42f5-8e5b-878f92014255 req-8728fe1c-56e2-428d-9861-b3663a7e226d service nova] Acquiring lock "refresh_cache-31020a94-94fd-4cd3-971b-ac78dcad7417" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1561.433549] env[62522]: DEBUG oslo_concurrency.lockutils [req-01696e4c-1661-42f5-8e5b-878f92014255 req-8728fe1c-56e2-428d-9861-b3663a7e226d service nova] Acquired lock "refresh_cache-31020a94-94fd-4cd3-971b-ac78dcad7417" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1561.433723] env[62522]: DEBUG nova.network.neutron [req-01696e4c-1661-42f5-8e5b-878f92014255 req-8728fe1c-56e2-428d-9861-b3663a7e226d service nova] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Refreshing network info cache for port 5a153fcd-0cb1-47f2-ac65-0ff69f4743e2 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1561.687154] env[62522]: DEBUG oslo_vmware.api [-] Task: {'id': task-2416668, 'name': CreateVM_Task, 'duration_secs': 0.312839} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.687499] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Created VM on the ESX host {{(pid=62522) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1561.687937] env[62522]: DEBUG oslo_concurrency.lockutils [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1561.688115] env[62522]: DEBUG oslo_concurrency.lockutils [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1561.688444] env[62522]: DEBUG oslo_concurrency.lockutils [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1561.688690] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf886099-e3cf-41dd-80a1-e9f6b77489b2 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.692825] env[62522]: DEBUG oslo_vmware.api [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1561.692825] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d28fe5-be4a-5997-80fd-b2e1d50483d5" [ 1561.692825] env[62522]: _type = "Task" [ 1561.692825] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.700040] env[62522]: DEBUG oslo_vmware.api [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d28fe5-be4a-5997-80fd-b2e1d50483d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.120927] env[62522]: DEBUG nova.network.neutron [req-01696e4c-1661-42f5-8e5b-878f92014255 req-8728fe1c-56e2-428d-9861-b3663a7e226d service nova] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Updated VIF entry in instance network info cache for port 5a153fcd-0cb1-47f2-ac65-0ff69f4743e2. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1562.121301] env[62522]: DEBUG nova.network.neutron [req-01696e4c-1661-42f5-8e5b-878f92014255 req-8728fe1c-56e2-428d-9861-b3663a7e226d service nova] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Updating instance_info_cache with network_info: [{"id": "5a153fcd-0cb1-47f2-ac65-0ff69f4743e2", "address": "fa:16:3e:bd:ec:5b", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a153fcd-0c", "ovs_interfaceid": "5a153fcd-0cb1-47f2-ac65-0ff69f4743e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1562.203793] env[62522]: DEBUG oslo_vmware.api [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]52d28fe5-be4a-5997-80fd-b2e1d50483d5, 'name': SearchDatastore_Task, 'duration_secs': 0.009704} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.204101] env[62522]: DEBUG oslo_concurrency.lockutils [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1562.204334] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Processing image 2ee4561b-ba48-4f45-82f6-eac89be98290 {{(pid=62522) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1562.204556] env[62522]: DEBUG oslo_concurrency.lockutils [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1562.204715] env[62522]: DEBUG oslo_concurrency.lockutils [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1562.204889] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1562.205145] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d8e7268a-248a-421c-81dc-f893271005e6 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.214900] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62522) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1562.215058] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62522) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1562.215718] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7148c813-5259-46bb-a86c-c9185ed9ca2f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.220383] env[62522]: DEBUG oslo_vmware.api [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1562.220383] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526959a8-8ce4-682b-53f4-19c01b340132" [ 1562.220383] env[62522]: _type = "Task" [ 1562.220383] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.227318] env[62522]: DEBUG oslo_vmware.api [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526959a8-8ce4-682b-53f4-19c01b340132, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.623876] env[62522]: DEBUG oslo_concurrency.lockutils [req-01696e4c-1661-42f5-8e5b-878f92014255 req-8728fe1c-56e2-428d-9861-b3663a7e226d service nova] Releasing lock "refresh_cache-31020a94-94fd-4cd3-971b-ac78dcad7417" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1562.730468] env[62522]: DEBUG oslo_vmware.api [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]526959a8-8ce4-682b-53f4-19c01b340132, 'name': SearchDatastore_Task, 'duration_secs': 0.008604} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.731224] env[62522]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88635d3c-0d22-4ce7-8f2b-e0d10d354dc9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.736488] env[62522]: DEBUG oslo_vmware.api [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1562.736488] env[62522]: value = "session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521dd467-2a81-7c5b-d784-4e51206a9840" [ 1562.736488] env[62522]: _type = "Task" [ 1562.736488] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.743562] env[62522]: DEBUG oslo_vmware.api [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521dd467-2a81-7c5b-d784-4e51206a9840, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.247072] env[62522]: DEBUG oslo_vmware.api [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': session[52e84e4c-4f44-b543-f316-bdfc55b18ff8]521dd467-2a81-7c5b-d784-4e51206a9840, 'name': SearchDatastore_Task, 'duration_secs': 0.009071} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.247072] env[62522]: DEBUG oslo_concurrency.lockutils [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1563.247285] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 31020a94-94fd-4cd3-971b-ac78dcad7417/31020a94-94fd-4cd3-971b-ac78dcad7417.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1563.247525] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-55e71526-2268-4f8f-8056-798a6d4aa4a3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.255244] env[62522]: DEBUG oslo_vmware.api [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1563.255244] env[62522]: value = "task-2416669" [ 1563.255244] env[62522]: _type = "Task" [ 1563.255244] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.262516] env[62522]: DEBUG oslo_vmware.api [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416669, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.764490] env[62522]: DEBUG oslo_vmware.api [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416669, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.417389} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.764869] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ee4561b-ba48-4f45-82f6-eac89be98290/2ee4561b-ba48-4f45-82f6-eac89be98290.vmdk to [datastore2] 31020a94-94fd-4cd3-971b-ac78dcad7417/31020a94-94fd-4cd3-971b-ac78dcad7417.vmdk {{(pid=62522) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1563.764970] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Extending root virtual disk to 1048576 {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1563.765200] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b714948d-b585-4cf3-b814-24fd5513a88f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.771843] env[62522]: DEBUG oslo_vmware.api [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1563.771843] env[62522]: value = "task-2416670" [ 1563.771843] env[62522]: _type = "Task" [ 1563.771843] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.778640] env[62522]: DEBUG oslo_vmware.api [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416670, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.281590] env[62522]: DEBUG oslo_vmware.api [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416670, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064463} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.281853] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Extended root virtual disk {{(pid=62522) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1564.283052] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9728983a-42fb-4207-9d96-df7afb7cf3ef {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.303750] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] 31020a94-94fd-4cd3-971b-ac78dcad7417/31020a94-94fd-4cd3-971b-ac78dcad7417.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1564.303993] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c9f4903-c009-49c3-8c61-50db05ae79c7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.323475] env[62522]: DEBUG oslo_vmware.api [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1564.323475] env[62522]: value = "task-2416671" [ 1564.323475] env[62522]: _type = "Task" [ 1564.323475] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.330912] env[62522]: DEBUG oslo_vmware.api [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416671, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.833923] env[62522]: DEBUG oslo_vmware.api [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416671, 'name': ReconfigVM_Task, 'duration_secs': 0.313268} completed successfully. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.835184] env[62522]: DEBUG nova.virt.vmwareapi.volumeops [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Reconfigured VM instance instance-0000007a to attach disk [datastore2] 31020a94-94fd-4cd3-971b-ac78dcad7417/31020a94-94fd-4cd3-971b-ac78dcad7417.vmdk or device None with type sparse {{(pid=62522) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1564.835742] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-28be7f38-f0cd-45a8-9604-26c60454a682 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.842169] env[62522]: DEBUG oslo_vmware.api [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1564.842169] env[62522]: value = "task-2416672" [ 1564.842169] env[62522]: _type = "Task" [ 1564.842169] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.849370] env[62522]: DEBUG oslo_vmware.api [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416672, 'name': Rename_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.351841] env[62522]: DEBUG oslo_vmware.api [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416672, 'name': Rename_Task, 'duration_secs': 0.130608} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.352226] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1565.352479] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8839e9d3-22b9-40fd-9035-7139c4b59233 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.358877] env[62522]: DEBUG oslo_vmware.api [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1565.358877] env[62522]: value = "task-2416673" [ 1565.358877] env[62522]: _type = "Task" [ 1565.358877] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.367098] env[62522]: DEBUG oslo_vmware.api [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416673, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.869100] env[62522]: DEBUG oslo_vmware.api [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416673, 'name': PowerOnVM_Task, 'duration_secs': 0.433028} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.869476] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1565.869553] env[62522]: INFO nova.compute.manager [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Took 6.62 seconds to spawn the instance on the hypervisor. [ 1565.869731] env[62522]: DEBUG nova.compute.manager [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1565.870497] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a833f9a-9c3a-4b06-a84f-4e921150aee1 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.390799] env[62522]: INFO nova.compute.manager [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Took 11.28 seconds to build instance. [ 1566.894593] env[62522]: DEBUG oslo_concurrency.lockutils [None req-32547a23-2361-4998-b4aa-2cfdf709d1e6 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "31020a94-94fd-4cd3-971b-ac78dcad7417" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.793s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.066676] env[62522]: DEBUG nova.compute.manager [req-08959479-57f8-4755-955c-74b82c53b620 req-2aa2043b-eef1-4ff5-ab66-39911d087d19 service nova] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Received event network-changed-5a153fcd-0cb1-47f2-ac65-0ff69f4743e2 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1567.066825] env[62522]: DEBUG nova.compute.manager [req-08959479-57f8-4755-955c-74b82c53b620 req-2aa2043b-eef1-4ff5-ab66-39911d087d19 service nova] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Refreshing instance network info cache due to event network-changed-5a153fcd-0cb1-47f2-ac65-0ff69f4743e2. 
{{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1567.067073] env[62522]: DEBUG oslo_concurrency.lockutils [req-08959479-57f8-4755-955c-74b82c53b620 req-2aa2043b-eef1-4ff5-ab66-39911d087d19 service nova] Acquiring lock "refresh_cache-31020a94-94fd-4cd3-971b-ac78dcad7417" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1567.067227] env[62522]: DEBUG oslo_concurrency.lockutils [req-08959479-57f8-4755-955c-74b82c53b620 req-2aa2043b-eef1-4ff5-ab66-39911d087d19 service nova] Acquired lock "refresh_cache-31020a94-94fd-4cd3-971b-ac78dcad7417" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1567.067389] env[62522]: DEBUG nova.network.neutron [req-08959479-57f8-4755-955c-74b82c53b620 req-2aa2043b-eef1-4ff5-ab66-39911d087d19 service nova] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Refreshing network info cache for port 5a153fcd-0cb1-47f2-ac65-0ff69f4743e2 {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1567.777837] env[62522]: DEBUG nova.network.neutron [req-08959479-57f8-4755-955c-74b82c53b620 req-2aa2043b-eef1-4ff5-ab66-39911d087d19 service nova] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Updated VIF entry in instance network info cache for port 5a153fcd-0cb1-47f2-ac65-0ff69f4743e2. {{(pid=62522) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1567.778225] env[62522]: DEBUG nova.network.neutron [req-08959479-57f8-4755-955c-74b82c53b620 req-2aa2043b-eef1-4ff5-ab66-39911d087d19 service nova] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Updating instance_info_cache with network_info: [{"id": "5a153fcd-0cb1-47f2-ac65-0ff69f4743e2", "address": "fa:16:3e:bd:ec:5b", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a153fcd-0c", "ovs_interfaceid": "5a153fcd-0cb1-47f2-ac65-0ff69f4743e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1568.280733] env[62522]: DEBUG oslo_concurrency.lockutils [req-08959479-57f8-4755-955c-74b82c53b620 req-2aa2043b-eef1-4ff5-ab66-39911d087d19 service nova] Releasing lock "refresh_cache-31020a94-94fd-4cd3-971b-ac78dcad7417" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1602.070435] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eb4c4691-6503-442a-9521-293e61de7271 
tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "31020a94-94fd-4cd3-971b-ac78dcad7417" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1602.070834] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eb4c4691-6503-442a-9521-293e61de7271 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "31020a94-94fd-4cd3-971b-ac78dcad7417" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1602.070883] env[62522]: DEBUG nova.compute.manager [None req-eb4c4691-6503-442a-9521-293e61de7271 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1602.071804] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e611cf4-9e13-4bab-a80e-244ae8126df7 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.078875] env[62522]: DEBUG nova.compute.manager [None req-eb4c4691-6503-442a-9521-293e61de7271 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62522) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1602.079457] env[62522]: DEBUG nova.objects.instance [None req-eb4c4691-6503-442a-9521-293e61de7271 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lazy-loading 'flavor' on Instance uuid 31020a94-94fd-4cd3-971b-ac78dcad7417 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1603.088550] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb4c4691-6503-442a-9521-293e61de7271 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1603.088936] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f803e403-73c6-429d-bab9-32ceb38e929c {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.097129] env[62522]: DEBUG oslo_vmware.api [None req-eb4c4691-6503-442a-9521-293e61de7271 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){ [ 1603.097129] env[62522]: value = "task-2416674" [ 1603.097129] env[62522]: _type = "Task" [ 1603.097129] env[62522]: } to complete. 
{{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.104494] env[62522]: DEBUG oslo_vmware.api [None req-eb4c4691-6503-442a-9521-293e61de7271 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416674, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.606790] env[62522]: DEBUG oslo_vmware.api [None req-eb4c4691-6503-442a-9521-293e61de7271 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416674, 'name': PowerOffVM_Task, 'duration_secs': 0.172297} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.607313] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb4c4691-6503-442a-9521-293e61de7271 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1603.607621] env[62522]: DEBUG nova.compute.manager [None req-eb4c4691-6503-442a-9521-293e61de7271 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1603.608448] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb42590-d773-4089-a7a7-9bea7306da1b {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.118977] env[62522]: DEBUG oslo_concurrency.lockutils [None req-eb4c4691-6503-442a-9521-293e61de7271 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "31020a94-94fd-4cd3-971b-ac78dcad7417" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.048s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1604.433351] env[62522]: DEBUG nova.objects.instance [None req-70cf1b54-2eaf-4bd6-adb5-7995b705fc2d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lazy-loading 'flavor' on Instance uuid 31020a94-94fd-4cd3-971b-ac78dcad7417 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1604.939334] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70cf1b54-2eaf-4bd6-adb5-7995b705fc2d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "refresh_cache-31020a94-94fd-4cd3-971b-ac78dcad7417" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1604.939552] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70cf1b54-2eaf-4bd6-adb5-7995b705fc2d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired lock "refresh_cache-31020a94-94fd-4cd3-971b-ac78dcad7417" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1604.939668] env[62522]: DEBUG nova.network.neutron [None req-70cf1b54-2eaf-4bd6-adb5-7995b705fc2d tempest-ServerActionsTestJSON-1767099261 
tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 1604.939858] env[62522]: DEBUG nova.objects.instance [None req-70cf1b54-2eaf-4bd6-adb5-7995b705fc2d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lazy-loading 'info_cache' on Instance uuid 31020a94-94fd-4cd3-971b-ac78dcad7417 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1605.444196] env[62522]: DEBUG nova.objects.base [None req-70cf1b54-2eaf-4bd6-adb5-7995b705fc2d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Object Instance<31020a94-94fd-4cd3-971b-ac78dcad7417> lazy-loaded attributes: flavor,info_cache {{(pid=62522) wrapper /opt/stack/nova/nova/objects/base.py:136}}
[ 1606.144410] env[62522]: DEBUG nova.network.neutron [None req-70cf1b54-2eaf-4bd6-adb5-7995b705fc2d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Updating instance_info_cache with network_info: [{"id": "5a153fcd-0cb1-47f2-ac65-0ff69f4743e2", "address": "fa:16:3e:bd:ec:5b", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a153fcd-0c", "ovs_interfaceid": "5a153fcd-0cb1-47f2-ac65-0ff69f4743e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1606.647144] env[62522]: DEBUG oslo_concurrency.lockutils [None req-70cf1b54-2eaf-4bd6-adb5-7995b705fc2d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Releasing lock "refresh_cache-31020a94-94fd-4cd3-971b-ac78dcad7417" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1607.653252] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-70cf1b54-2eaf-4bd6-adb5-7995b705fc2d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Powering on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 1607.653682] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1a8b28a6-7360-4b61-b0a3-681fee56ed39 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1607.663298] env[62522]: DEBUG oslo_vmware.api [None req-70cf1b54-2eaf-4bd6-adb5-7995b705fc2d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){
[ 1607.663298] env[62522]: value = "task-2416675"
[ 1607.663298] env[62522]: _type = "Task"
[ 1607.663298] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1607.670565] env[62522]: DEBUG oslo_vmware.api [None req-70cf1b54-2eaf-4bd6-adb5-7995b705fc2d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416675, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1608.172771] env[62522]: DEBUG oslo_vmware.api [None req-70cf1b54-2eaf-4bd6-adb5-7995b705fc2d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416675, 'name': PowerOnVM_Task, 'duration_secs': 0.386924} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1608.173081] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-70cf1b54-2eaf-4bd6-adb5-7995b705fc2d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Powered on the VM {{(pid=62522) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 1608.173256] env[62522]: DEBUG nova.compute.manager [None req-70cf1b54-2eaf-4bd6-adb5-7995b705fc2d tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}}
[ 1608.174119] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e54de54-c0ec-41db-83ee-1e5d46fcca7e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1608.730326] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1608.730785] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1608.730932] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Starting heal instance info cache {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}}
[ 1608.731066] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Rebuilding the list of instances to heal {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}}
[ 1609.168509] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd02ac05-2b6e-495b-8857-f886ea67595a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1609.175599] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-03abf80b-4325-49e5-99d1-c056e385609f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Suspending the VM {{(pid=62522) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}}
[ 1609.175830] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-5e5aa355-1b1c-4f8f-9814-f33c69747fc4 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1609.181734] env[62522]: DEBUG oslo_vmware.api [None req-03abf80b-4325-49e5-99d1-c056e385609f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){
[ 1609.181734] env[62522]: value = "task-2416676"
[ 1609.181734] env[62522]: _type = "Task"
[ 1609.181734] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1609.189060] env[62522]: DEBUG oslo_vmware.api [None req-03abf80b-4325-49e5-99d1-c056e385609f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416676, 'name': SuspendVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1609.234458] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "refresh_cache-31020a94-94fd-4cd3-971b-ac78dcad7417" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1609.234672] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquired lock "refresh_cache-31020a94-94fd-4cd3-971b-ac78dcad7417" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1609.234745] env[62522]: DEBUG nova.network.neutron [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Forcefully refreshing network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}}
[ 1609.234888] env[62522]: DEBUG nova.objects.instance [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lazy-loading 'info_cache' on Instance uuid 31020a94-94fd-4cd3-971b-ac78dcad7417 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1609.691348] env[62522]: DEBUG oslo_vmware.api [None req-03abf80b-4325-49e5-99d1-c056e385609f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416676, 'name': SuspendVM_Task} progress is 75%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1610.191575] env[62522]: DEBUG oslo_vmware.api [None req-03abf80b-4325-49e5-99d1-c056e385609f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416676, 'name': SuspendVM_Task, 'duration_secs': 0.563115} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1610.191960] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-03abf80b-4325-49e5-99d1-c056e385609f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Suspended the VM {{(pid=62522) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}}
[ 1610.192040] env[62522]: DEBUG nova.compute.manager [None req-03abf80b-4325-49e5-99d1-c056e385609f tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}}
[ 1610.192813] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18fd47d3-e7d2-4f51-915e-e9f56e1398e3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1610.997790] env[62522]: DEBUG nova.network.neutron [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Updating instance_info_cache with network_info: [{"id": "5a153fcd-0cb1-47f2-ac65-0ff69f4743e2", "address": "fa:16:3e:bd:ec:5b", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a153fcd-0c", "ovs_interfaceid": "5a153fcd-0cb1-47f2-ac65-0ff69f4743e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1611.500498] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Releasing lock "refresh_cache-31020a94-94fd-4cd3-971b-ac78dcad7417" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1611.500897] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Updated the network info_cache for instance {{(pid=62522) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}}
[ 1611.500950] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1611.501088] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1611.501244] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1611.501396] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1611.501542] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1611.501690] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1611.501820] env[62522]: DEBUG nova.compute.manager [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62522) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}}
[ 1611.501962] env[62522]: DEBUG oslo_service.periodic_task [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62522) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1611.533687] env[62522]: INFO nova.compute.manager [None req-178c7568-6dc7-4fa5-aaf3-fc5394416e45 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Resuming
[ 1611.534287] env[62522]: DEBUG nova.objects.instance [None req-178c7568-6dc7-4fa5-aaf3-fc5394416e45 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lazy-loading 'flavor' on Instance uuid 31020a94-94fd-4cd3-971b-ac78dcad7417 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1612.005554] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1612.005790] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1612.005964] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1612.006139] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62522) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1612.007420] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bafd3f8a-32f5-4c05-b9f0-fff71f5a1a75 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1612.015589] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc8627f-e1fc-4e75-a4ad-c0388eb2d038 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1612.029023] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb1254e-3cec-43a9-bbfb-f7af6f316d16 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1612.034868] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b278968f-ebc6-4af5-b518-23c17169f796 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1612.066276] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180999MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=62522) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1612.066410] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1612.066592] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1613.045025] env[62522]: DEBUG oslo_concurrency.lockutils [None req-178c7568-6dc7-4fa5-aaf3-fc5394416e45 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "refresh_cache-31020a94-94fd-4cd3-971b-ac78dcad7417" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1613.045440] env[62522]: DEBUG oslo_concurrency.lockutils [None req-178c7568-6dc7-4fa5-aaf3-fc5394416e45 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquired lock "refresh_cache-31020a94-94fd-4cd3-971b-ac78dcad7417" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1613.045440] env[62522]: DEBUG nova.network.neutron [None req-178c7568-6dc7-4fa5-aaf3-fc5394416e45 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Building network info cache for instance {{(pid=62522) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 1613.090929] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Instance 31020a94-94fd-4cd3-971b-ac78dcad7417 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62522) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1613.091136] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1613.091282] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62522) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1613.115898] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf2d962-1021-47f1-8372-0b8c913ce006 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1613.123549] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d0a9c45-2d2a-4e4a-a994-5ab4b43f3b9e {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1613.152302] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a36956-62d0-401a-b06c-691b513f928f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1613.159119] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd52253d-abc0-4950-bd6d-3d0ef8da9c3a {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1613.171381] env[62522]: DEBUG nova.compute.provider_tree [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1613.674277] env[62522]: DEBUG nova.scheduler.client.report [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 1613.745799] env[62522]: DEBUG nova.network.neutron [None req-178c7568-6dc7-4fa5-aaf3-fc5394416e45 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Updating instance_info_cache with network_info: [{"id": "5a153fcd-0cb1-47f2-ac65-0ff69f4743e2", "address": "fa:16:3e:bd:ec:5b", "network": {"id": "e3153dad-6ab7-45a3-bda4-5ebbd95258f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-521501740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bab9d5d3c27d4c218b88e4a029300a66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f625f389-b7cf-49b9-998a-87f3a9e3f234", "external-id": "nsx-vlan-transportzone-280", "segmentation_id": 280, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a153fcd-0c", "ovs_interfaceid": "5a153fcd-0cb1-47f2-ac65-0ff69f4743e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1614.179154] env[62522]: DEBUG nova.compute.resource_tracker [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62522) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1614.179477] env[62522]: DEBUG oslo_concurrency.lockutils [None req-1097fae8-ca2a-457c-8d96-c7801bc80f13 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.113s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1614.248353] env[62522]: DEBUG oslo_concurrency.lockutils [None req-178c7568-6dc7-4fa5-aaf3-fc5394416e45 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Releasing lock "refresh_cache-31020a94-94fd-4cd3-971b-ac78dcad7417" {{(pid=62522) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1614.249336] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f99b0c-ff30-49f4-89f2-fcdc417056d0 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1614.256809] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-178c7568-6dc7-4fa5-aaf3-fc5394416e45 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Resuming the VM {{(pid=62522) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}}
[ 1614.257059] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d6f58782-78b0-4c33-9b62-20f897f19896 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1614.263313] env[62522]: DEBUG oslo_vmware.api [None req-178c7568-6dc7-4fa5-aaf3-fc5394416e45 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){
[ 1614.263313] env[62522]: value = "task-2416677"
[ 1614.263313] env[62522]: _type = "Task"
[ 1614.263313] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1614.270958] env[62522]: DEBUG oslo_vmware.api [None req-178c7568-6dc7-4fa5-aaf3-fc5394416e45 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416677, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1614.773635] env[62522]: DEBUG oslo_vmware.api [None req-178c7568-6dc7-4fa5-aaf3-fc5394416e45 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416677, 'name': PowerOnVM_Task, 'duration_secs': 0.490238} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1614.773941] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-178c7568-6dc7-4fa5-aaf3-fc5394416e45 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Resumed the VM {{(pid=62522) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}}
[ 1614.774137] env[62522]: DEBUG nova.compute.manager [None req-178c7568-6dc7-4fa5-aaf3-fc5394416e45 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Checking state {{(pid=62522) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}}
[ 1614.775011] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92f72f4-a147-4a34-bcc7-e51995d107d9 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1616.179917] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "31020a94-94fd-4cd3-971b-ac78dcad7417" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1616.180343] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "31020a94-94fd-4cd3-971b-ac78dcad7417" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1616.180397] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "31020a94-94fd-4cd3-971b-ac78dcad7417-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1616.180555] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "31020a94-94fd-4cd3-971b-ac78dcad7417-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1616.180728] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "31020a94-94fd-4cd3-971b-ac78dcad7417-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1616.182724] env[62522]: INFO nova.compute.manager [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Terminating instance
[ 1616.686721] env[62522]: DEBUG nova.compute.manager [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Start destroying the instance on the hypervisor. {{(pid=62522) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}}
[ 1616.686995] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Destroying instance {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 1616.687930] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985b2931-a94a-4b1f-9012-a395b6d496c3 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1616.695405] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Powering off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 1616.695619] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e3a1d9dc-29c6-4166-8fbe-cf9168de8058 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1616.702017] env[62522]: DEBUG oslo_vmware.api [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){
[ 1616.702017] env[62522]: value = "task-2416678"
[ 1616.702017] env[62522]: _type = "Task"
[ 1616.702017] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1616.709857] env[62522]: DEBUG oslo_vmware.api [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416678, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1617.212876] env[62522]: DEBUG oslo_vmware.api [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416678, 'name': PowerOffVM_Task, 'duration_secs': 0.185308} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1617.213286] env[62522]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Powered off the VM {{(pid=62522) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1617.213401] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Unregistering the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1617.213621] env[62522]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e7acbe19-58c4-4c71-80b5-d32034562452 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1617.274994] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Unregistered the VM {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 1617.275274] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Deleting contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 1617.275425] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Deleting the datastore file [datastore2] 31020a94-94fd-4cd3-971b-ac78dcad7417 {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1617.275688] env[62522]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e01edd2-b027-476a-83bf-99fcccf516be {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1617.282881] env[62522]: DEBUG oslo_vmware.api [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for the task: (returnval){
[ 1617.282881] env[62522]: value = "task-2416680"
[ 1617.282881] env[62522]: _type = "Task"
[ 1617.282881] env[62522]: } to complete. {{(pid=62522) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1617.290388] env[62522]: DEBUG oslo_vmware.api [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416680, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1617.792541] env[62522]: DEBUG oslo_vmware.api [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Task: {'id': task-2416680, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144873} completed successfully. {{(pid=62522) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1617.792718] env[62522]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Deleted the datastore file {{(pid=62522) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1617.792906] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Deleted contents of the VM from datastore datastore2 {{(pid=62522) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1617.793097] env[62522]: DEBUG nova.virt.vmwareapi.vmops [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Instance destroyed {{(pid=62522) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1617.793271] env[62522]: INFO nova.compute.manager [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Took 1.11 seconds to destroy the instance on the hypervisor.
[ 1617.793510] env[62522]: DEBUG oslo.service.loopingcall [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62522) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1617.793697] env[62522]: DEBUG nova.compute.manager [-] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Deallocating network for instance {{(pid=62522) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}}
[ 1617.793792] env[62522]: DEBUG nova.network.neutron [-] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] deallocate_for_instance() {{(pid=62522) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1618.245552] env[62522]: DEBUG nova.compute.manager [req-38a28d59-62a6-48b7-9ace-24da74815bef req-991b931e-ef46-4c48-aee1-cb426732c62e service nova] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Received event network-vif-deleted-5a153fcd-0cb1-47f2-ac65-0ff69f4743e2 {{(pid=62522) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 1618.245820] env[62522]: INFO nova.compute.manager [req-38a28d59-62a6-48b7-9ace-24da74815bef req-991b931e-ef46-4c48-aee1-cb426732c62e service nova] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Neutron deleted interface 5a153fcd-0cb1-47f2-ac65-0ff69f4743e2; detaching it from the instance and deleting it from the info cache
[ 1618.246033] env[62522]: DEBUG nova.network.neutron [req-38a28d59-62a6-48b7-9ace-24da74815bef req-991b931e-ef46-4c48-aee1-cb426732c62e service nova] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1618.729700] env[62522]: DEBUG nova.network.neutron [-] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Updating instance_info_cache with network_info: [] {{(pid=62522) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1618.748185] env[62522]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c8bf1c3a-8524-4917-a3ed-a534ccf81038 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1618.758367] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3155d1-1c43-4977-a029-999cb4a1bc6f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1618.780929] env[62522]: DEBUG nova.compute.manager [req-38a28d59-62a6-48b7-9ace-24da74815bef req-991b931e-ef46-4c48-aee1-cb426732c62e service nova] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Detach interface failed, port_id=5a153fcd-0cb1-47f2-ac65-0ff69f4743e2, reason: Instance 31020a94-94fd-4cd3-971b-ac78dcad7417 could not be found. {{(pid=62522) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}}
[ 1619.232505] env[62522]: INFO nova.compute.manager [-] [instance: 31020a94-94fd-4cd3-971b-ac78dcad7417] Took 1.44 seconds to deallocate network for instance.
[ 1619.739170] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1619.739571] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1619.739790] env[62522]: DEBUG nova.objects.instance [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lazy-loading 'resources' on Instance uuid 31020a94-94fd-4cd3-971b-ac78dcad7417 {{(pid=62522) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1620.277882] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-337914dc-161d-42a3-9b05-a36cf4906403 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1620.285672] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7ccacdc-b9b2-47d3-8231-41ecfbc65451 {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1620.316722] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6b002be-b76d-4169-b258-02bb44391bbd {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1620.323609] env[62522]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef3d78a8-e4dd-4b42-a683-c18a8c984e2f {{(pid=62522) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1620.992192] env[62522]: DEBUG nova.compute.provider_tree [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Inventory has not changed in ProviderTree for provider: c7fa38b2-245d-4337-a012-22c1a01c0a72 {{(pid=62522) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1621.495954] env[62522]: DEBUG nova.scheduler.client.report [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Inventory has not changed for provider c7fa38b2-245d-4337-a012-22c1a01c0a72 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62522) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 1622.001382] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.262s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1622.021061] env[62522]: INFO nova.scheduler.client.report [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Deleted allocations for instance 31020a94-94fd-4cd3-971b-ac78dcad7417
[ 1622.528574] env[62522]: DEBUG oslo_concurrency.lockutils [None req-f5c9e742-9ce1-493a-a4e5-33a731cdc3a5 tempest-ServerActionsTestJSON-1767099261 tempest-ServerActionsTestJSON-1767099261-project-member] Lock "31020a94-94fd-4cd3-971b-ac78dcad7417" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.348s {{(pid=62522) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}